repo_id
stringclasses
875 values
size
int64
974
38.9k
file_path
stringlengths
10
308
content
stringlengths
974
38.9k
googleapis/google-cloud-java
36,704
java-cloudsupport/proto-google-cloud-cloudsupport-v2beta/src/main/java/com/google/cloud/support/v2beta/UpdateCaseRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/support/v2beta/case_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.support.v2beta; /** * * * <pre> * The request message for the UpdateCase endpoint * </pre> * * Protobuf type {@code google.cloud.support.v2beta.UpdateCaseRequest} */ public final class UpdateCaseRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.support.v2beta.UpdateCaseRequest) UpdateCaseRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateCaseRequest.newBuilder() to construct. 
private UpdateCaseRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateCaseRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateCaseRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.support.v2beta.CaseServiceProto .internal_static_google_cloud_support_v2beta_UpdateCaseRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.support.v2beta.CaseServiceProto .internal_static_google_cloud_support_v2beta_UpdateCaseRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.support.v2beta.UpdateCaseRequest.class, com.google.cloud.support.v2beta.UpdateCaseRequest.Builder.class); } private int bitField0_; public static final int CASE_FIELD_NUMBER = 1; private com.google.cloud.support.v2beta.Case case_; /** * * * <pre> * Required. The case to update. * </pre> * * <code>.google.cloud.support.v2beta.Case case = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the case field is set. */ @java.lang.Override public boolean hasCase() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The case to update. * </pre> * * <code>.google.cloud.support.v2beta.Case case = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The case. */ @java.lang.Override public com.google.cloud.support.v2beta.Case getCase() { return case_ == null ? com.google.cloud.support.v2beta.Case.getDefaultInstance() : case_; } /** * * * <pre> * Required. The case to update. * </pre> * * <code>.google.cloud.support.v2beta.Case case = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.support.v2beta.CaseOrBuilder getCaseOrBuilder() { return case_ == null ? 
com.google.cloud.support.v2beta.Case.getDefaultInstance() : case_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * A list of attributes of the case that should be updated. Supported values * are `priority`, `display_name`, and `subscriber_email_addresses`. If no * fields are specified, all supported fields are updated. * * Be careful - if you do not provide a field mask, then you might * accidentally clear some fields. For example, if you leave the field mask * empty and do not provide a value for `subscriber_email_addresses`, then * `subscriber_email_addresses` is updated to empty. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * A list of attributes of the case that should be updated. Supported values * are `priority`, `display_name`, and `subscriber_email_addresses`. If no * fields are specified, all supported fields are updated. * * Be careful - if you do not provide a field mask, then you might * accidentally clear some fields. For example, if you leave the field mask * empty and do not provide a value for `subscriber_email_addresses`, then * `subscriber_email_addresses` is updated to empty. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * A list of attributes of the case that should be updated. Supported values * are `priority`, `display_name`, and `subscriber_email_addresses`. If no * fields are specified, all supported fields are updated. * * Be careful - if you do not provide a field mask, then you might * accidentally clear some fields. 
For example, if you leave the field mask * empty and do not provide a value for `subscriber_email_addresses`, then * `subscriber_email_addresses` is updated to empty. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getCase()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getCase()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.support.v2beta.UpdateCaseRequest)) { return super.equals(obj); } com.google.cloud.support.v2beta.UpdateCaseRequest other = (com.google.cloud.support.v2beta.UpdateCaseRequest) obj; if (hasCase() != other.hasCase()) return false; if (hasCase()) { if (!getCase().equals(other.getCase())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if 
(!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasCase()) { hash = (37 * hash) + CASE_FIELD_NUMBER; hash = (53 * hash) + getCase().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.support.v2beta.UpdateCaseRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.support.v2beta.UpdateCaseRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.support.v2beta.UpdateCaseRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.support.v2beta.UpdateCaseRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.support.v2beta.UpdateCaseRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.support.v2beta.UpdateCaseRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.support.v2beta.UpdateCaseRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.support.v2beta.UpdateCaseRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.support.v2beta.UpdateCaseRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.support.v2beta.UpdateCaseRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.support.v2beta.UpdateCaseRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.support.v2beta.UpdateCaseRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.support.v2beta.UpdateCaseRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public 
Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The request message for the UpdateCase endpoint * </pre> * * Protobuf type {@code google.cloud.support.v2beta.UpdateCaseRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.support.v2beta.UpdateCaseRequest) com.google.cloud.support.v2beta.UpdateCaseRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.support.v2beta.CaseServiceProto .internal_static_google_cloud_support_v2beta_UpdateCaseRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.support.v2beta.CaseServiceProto .internal_static_google_cloud_support_v2beta_UpdateCaseRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.support.v2beta.UpdateCaseRequest.class, com.google.cloud.support.v2beta.UpdateCaseRequest.Builder.class); } // Construct using com.google.cloud.support.v2beta.UpdateCaseRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getCaseFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; case_ = null; if (caseBuilder_ != null) { caseBuilder_.dispose(); caseBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { 
updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.support.v2beta.CaseServiceProto .internal_static_google_cloud_support_v2beta_UpdateCaseRequest_descriptor; } @java.lang.Override public com.google.cloud.support.v2beta.UpdateCaseRequest getDefaultInstanceForType() { return com.google.cloud.support.v2beta.UpdateCaseRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.support.v2beta.UpdateCaseRequest build() { com.google.cloud.support.v2beta.UpdateCaseRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.support.v2beta.UpdateCaseRequest buildPartial() { com.google.cloud.support.v2beta.UpdateCaseRequest result = new com.google.cloud.support.v2beta.UpdateCaseRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.support.v2beta.UpdateCaseRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.case_ = caseBuilder_ == null ? case_ : caseBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.support.v2beta.UpdateCaseRequest) { return mergeFrom((com.google.cloud.support.v2beta.UpdateCaseRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.support.v2beta.UpdateCaseRequest other) { if (other == com.google.cloud.support.v2beta.UpdateCaseRequest.getDefaultInstance()) return this; if (other.hasCase()) { mergeCase(other.getCase()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } 
try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getCaseFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.support.v2beta.Case case_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.support.v2beta.Case, com.google.cloud.support.v2beta.Case.Builder, com.google.cloud.support.v2beta.CaseOrBuilder> caseBuilder_; /** * * * <pre> * Required. The case to update. * </pre> * * <code>.google.cloud.support.v2beta.Case case = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the case field is set. */ public boolean hasCase() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The case to update. * </pre> * * <code>.google.cloud.support.v2beta.Case case = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The case. */ public com.google.cloud.support.v2beta.Case getCase() { if (caseBuilder_ == null) { return case_ == null ? com.google.cloud.support.v2beta.Case.getDefaultInstance() : case_; } else { return caseBuilder_.getMessage(); } } /** * * * <pre> * Required. The case to update. 
* </pre> * * <code>.google.cloud.support.v2beta.Case case = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setCase(com.google.cloud.support.v2beta.Case value) { if (caseBuilder_ == null) { if (value == null) { throw new NullPointerException(); } case_ = value; } else { caseBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The case to update. * </pre> * * <code>.google.cloud.support.v2beta.Case case = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setCase(com.google.cloud.support.v2beta.Case.Builder builderForValue) { if (caseBuilder_ == null) { case_ = builderForValue.build(); } else { caseBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The case to update. * </pre> * * <code>.google.cloud.support.v2beta.Case case = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeCase(com.google.cloud.support.v2beta.Case value) { if (caseBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && case_ != null && case_ != com.google.cloud.support.v2beta.Case.getDefaultInstance()) { getCaseBuilder().mergeFrom(value); } else { case_ = value; } } else { caseBuilder_.mergeFrom(value); } if (case_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The case to update. * </pre> * * <code>.google.cloud.support.v2beta.Case case = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearCase() { bitField0_ = (bitField0_ & ~0x00000001); case_ = null; if (caseBuilder_ != null) { caseBuilder_.dispose(); caseBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The case to update. 
* </pre> * * <code>.google.cloud.support.v2beta.Case case = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.support.v2beta.Case.Builder getCaseBuilder() { bitField0_ |= 0x00000001; onChanged(); return getCaseFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The case to update. * </pre> * * <code>.google.cloud.support.v2beta.Case case = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.support.v2beta.CaseOrBuilder getCaseOrBuilder() { if (caseBuilder_ != null) { return caseBuilder_.getMessageOrBuilder(); } else { return case_ == null ? com.google.cloud.support.v2beta.Case.getDefaultInstance() : case_; } } /** * * * <pre> * Required. The case to update. * </pre> * * <code>.google.cloud.support.v2beta.Case case = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.support.v2beta.Case, com.google.cloud.support.v2beta.Case.Builder, com.google.cloud.support.v2beta.CaseOrBuilder> getCaseFieldBuilder() { if (caseBuilder_ == null) { caseBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.support.v2beta.Case, com.google.cloud.support.v2beta.Case.Builder, com.google.cloud.support.v2beta.CaseOrBuilder>( getCase(), getParentForChildren(), isClean()); case_ = null; } return caseBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * A list of attributes of the case that should be updated. Supported values * are `priority`, `display_name`, and `subscriber_email_addresses`. If no * fields are specified, all supported fields are updated. * * Be careful - if you do not provide a field mask, then you might * accidentally clear some fields. 
For example, if you leave the field mask * empty and do not provide a value for `subscriber_email_addresses`, then * `subscriber_email_addresses` is updated to empty. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * A list of attributes of the case that should be updated. Supported values * are `priority`, `display_name`, and `subscriber_email_addresses`. If no * fields are specified, all supported fields are updated. * * Be careful - if you do not provide a field mask, then you might * accidentally clear some fields. For example, if you leave the field mask * empty and do not provide a value for `subscriber_email_addresses`, then * `subscriber_email_addresses` is updated to empty. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * A list of attributes of the case that should be updated. Supported values * are `priority`, `display_name`, and `subscriber_email_addresses`. If no * fields are specified, all supported fields are updated. * * Be careful - if you do not provide a field mask, then you might * accidentally clear some fields. For example, if you leave the field mask * empty and do not provide a value for `subscriber_email_addresses`, then * `subscriber_email_addresses` is updated to empty. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A list of attributes of the case that should be updated. Supported values * are `priority`, `display_name`, and `subscriber_email_addresses`. If no * fields are specified, all supported fields are updated. * * Be careful - if you do not provide a field mask, then you might * accidentally clear some fields. For example, if you leave the field mask * empty and do not provide a value for `subscriber_email_addresses`, then * `subscriber_email_addresses` is updated to empty. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A list of attributes of the case that should be updated. Supported values * are `priority`, `display_name`, and `subscriber_email_addresses`. If no * fields are specified, all supported fields are updated. * * Be careful - if you do not provide a field mask, then you might * accidentally clear some fields. For example, if you leave the field mask * empty and do not provide a value for `subscriber_email_addresses`, then * `subscriber_email_addresses` is updated to empty. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * A list of attributes of the case that should be updated. Supported values * are `priority`, `display_name`, and `subscriber_email_addresses`. If no * fields are specified, all supported fields are updated. * * Be careful - if you do not provide a field mask, then you might * accidentally clear some fields. For example, if you leave the field mask * empty and do not provide a value for `subscriber_email_addresses`, then * `subscriber_email_addresses` is updated to empty. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * A list of attributes of the case that should be updated. Supported values * are `priority`, `display_name`, and `subscriber_email_addresses`. If no * fields are specified, all supported fields are updated. * * Be careful - if you do not provide a field mask, then you might * accidentally clear some fields. For example, if you leave the field mask * empty and do not provide a value for `subscriber_email_addresses`, then * `subscriber_email_addresses` is updated to empty. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * A list of attributes of the case that should be updated. Supported values * are `priority`, `display_name`, and `subscriber_email_addresses`. If no * fields are specified, all supported fields are updated. * * Be careful - if you do not provide a field mask, then you might * accidentally clear some fields. For example, if you leave the field mask * empty and do not provide a value for `subscriber_email_addresses`, then * `subscriber_email_addresses` is updated to empty. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * A list of attributes of the case that should be updated. Supported values * are `priority`, `display_name`, and `subscriber_email_addresses`. If no * fields are specified, all supported fields are updated. * * Be careful - if you do not provide a field mask, then you might * accidentally clear some fields. For example, if you leave the field mask * empty and do not provide a value for `subscriber_email_addresses`, then * `subscriber_email_addresses` is updated to empty. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.support.v2beta.UpdateCaseRequest) } // @@protoc_insertion_point(class_scope:google.cloud.support.v2beta.UpdateCaseRequest) private static final com.google.cloud.support.v2beta.UpdateCaseRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.support.v2beta.UpdateCaseRequest(); } public static com.google.cloud.support.v2beta.UpdateCaseRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateCaseRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateCaseRequest>() { @java.lang.Override public UpdateCaseRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch 
(com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateCaseRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateCaseRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.support.v2beta.UpdateCaseRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
google/nomulus
37,033
core/src/main/java/google/registry/beam/rde/RdePipeline.java
// Copyright 2021 The Nomulus Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package google.registry.beam.rde;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.ImmutableSet.toImmutableSet;
import static google.registry.beam.rde.RdePipeline.TupleTags.DOMAIN_FRAGMENTS;
import static google.registry.beam.rde.RdePipeline.TupleTags.EXTERNAL_HOST_FRAGMENTS;
import static google.registry.beam.rde.RdePipeline.TupleTags.HOST_TO_PENDING_DEPOSIT;
import static google.registry.beam.rde.RdePipeline.TupleTags.PENDING_DEPOSIT;
import static google.registry.beam.rde.RdePipeline.TupleTags.REFERENCED_CONTACTS;
import static google.registry.beam.rde.RdePipeline.TupleTags.REFERENCED_HOSTS;
import static google.registry.beam.rde.RdePipeline.TupleTags.REVISION_ID;
import static google.registry.beam.rde.RdePipeline.TupleTags.SUPERORDINATE_DOMAINS;
import static google.registry.model.reporting.HistoryEntryDao.RESOURCE_TYPES_TO_HISTORY_TYPES;
import static google.registry.persistence.transaction.TransactionManagerFactory.tm;
import static google.registry.util.SafeSerializationUtils.safeDeserializeCollection;
import static google.registry.util.SafeSerializationUtils.serializeCollection;
import static google.registry.util.SerializeUtils.decodeBase64;
import static google.registry.util.SerializeUtils.encodeBase64;
import static org.apache.beam.sdk.values.TypeDescriptors.kvs;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import com.google.common.collect.Streams;
import com.google.common.flogger.FluentLogger;
import com.google.common.io.BaseEncoding;
import dagger.BindsInstance;
import dagger.Component;
import google.registry.batch.CloudTasksUtils;
import google.registry.beam.common.RegistryJpaIO;
import google.registry.beam.common.RegistryPipelineOptions;
import google.registry.config.CloudTasksUtilsModule;
import google.registry.config.CredentialModule;
import google.registry.config.RegistryConfig.ConfigModule;
import google.registry.gcs.GcsUtils;
import google.registry.model.EppResource;
import google.registry.model.contact.Contact;
import google.registry.model.contact.ContactHistory;
import google.registry.model.domain.Domain;
import google.registry.model.domain.DomainHistory;
import google.registry.model.host.Host;
import google.registry.model.host.HostHistory;
import google.registry.model.rde.RdeMode;
import google.registry.model.registrar.Registrar;
import google.registry.model.registrar.Registrar.Type;
import google.registry.model.reporting.HistoryEntry;
import google.registry.model.reporting.HistoryEntry.HistoryEntryId;
import google.registry.persistence.PersistenceModule.TransactionIsolationLevel;
import google.registry.persistence.VKey;
import google.registry.rde.DepositFragment;
import google.registry.rde.PendingDeposit;
import google.registry.rde.PendingDeposit.PendingDepositCoder;
import google.registry.rde.RdeMarshaller;
import google.registry.util.UtilsModule;
import google.registry.xml.ValidationMode;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import java.io.IOException;
import java.io.Serializable;
import java.util.HashSet;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.PipelineResult;
import org.apache.beam.sdk.coders.KvCoder;
import org.apache.beam.sdk.coders.SerializableCoder;
import org.apache.beam.sdk.coders.StringUtf8Coder;
import org.apache.beam.sdk.coders.VarLongCoder;
import org.apache.beam.sdk.metrics.Counter;
import org.apache.beam.sdk.metrics.Metrics;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.Filter;
import org.apache.beam.sdk.transforms.FlatMapElements;
import org.apache.beam.sdk.transforms.Flatten;
import org.apache.beam.sdk.transforms.GroupByKey;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.join.CoGbkResult;
import org.apache.beam.sdk.transforms.join.CoGroupByKey;
import org.apache.beam.sdk.transforms.join.KeyedPCollectionTuple;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollectionList;
import org.apache.beam.sdk.values.PCollectionTuple;
import org.apache.beam.sdk.values.TupleTag;
import org.apache.beam.sdk.values.TupleTagList;
import org.apache.beam.sdk.values.TypeDescriptor;
import org.joda.time.DateTime;

/**
 * Definition of a Dataflow Flex template, which generates RDE/BRDA deposits.
 *
 * <p>To stage this template locally, run {@code ./nom_build :core:sBP --environment=alpha
 * --pipeline=rde}.
 *
 * <p>Then, you can run the staged template via the API client library, gCloud or a raw REST call.
 *
 * <p>This pipeline only works for pending deposits with the same watermark, the {@link
 * google.registry.rde.RdeStagingAction} will batch such pending deposits together and launch
 * multiple pipelines if multiple watermarks exist.
 *
 * <p>The pipeline is broadly divided into two parts -- creating the {@link DepositFragment}s, and
 * processing them.
 *
 * <h2>Creating {@link DepositFragment}</h2>
 *
 * <h3>{@link Registrar}</h3>
 *
 * Non-test registrar entities are loaded from Cloud SQL and marshalled into deposit fragments. They
 * are <b>NOT</b> rewound to the watermark.
 *
 * <h3>{@link EppResource}</h3>
 *
 * All EPP resources are loaded from the corresponding {@link HistoryEntry}, which has the resource
 * embedded. In general, we find most recent history entry before watermark and filter out the ones
 * that are soft-deleted by watermark. The history is emitted as pairs of (resource repo ID: history
 * revision ID) from the SQL query.
 *
 * <h3>{@link Domain}</h3>
 *
 * After the most recent (live) domain resources are loaded from the corresponding history objects,
 * we marshall them to deposit fragments and emit the (pending deposit: deposit fragment) pairs for
 * further processing. We also find all the contacts and hosts referenced by a given domain and emit
 * pairs of (contact/host repo ID: pending deposit) for all RDE pending deposits for further
 * processing.
 *
 * <h3>{@link Contact}</h3>
 *
 * We first join most recent contact histories, represented by (contact repo ID: contact history
 * revision ID) pairs, with referenced contacts, represented by (contact repo ID: pending deposit)
 * pairs, on the contact repo ID, to remove unreferenced contact histories. Contact resources are
 * then loaded from the remaining referenced contact histories, and marshalled into (pending
 * deposit: deposit fragment) pairs.
 *
 * <h3>{@link Host}</h3>
 *
 * Similar to {@link Contact}, we join the most recent host history with referenced hosts to find
 * most recent referenced hosts. For external hosts we do the same treatment as we did on contacts
 * and obtain the (pending deposit: deposit fragment) pairs. For subordinate hosts, we need to find
 * the superordinate domain in order to properly handle pending transfer in the deposit as well. So
 * we first find the superordinate domain repo ID from the host and join the (superordinate domain
 * repo ID: (subordinate host repo ID: (pending deposit: revision ID))) pair with the (domain repo
 * ID: revision ID) pair obtained from the domain history query in order to map the host at
 * watermark to the domain at watermark. We then proceed to create the (pending deposit: deposit
 * fragment) pair for subordinate hosts using the added domain information.
 *
 * <h2>Processing {@link DepositFragment}</h2>
 *
 * The (pending deposit: deposit fragment) pairs from different resources are combined and grouped
 * by pending deposit. For each pending deposit, all the relevant deposit fragments are written into
 * an encrypted file stored on GCS. The filename is uniquely determined by the Beam job ID so there
 * is no need to lock the GCS write operation to prevent stomping. The cursor for staging the
 * pending deposit is then rolled forward, and the next action is enqueued. The latter two
 * operations are performed in a transaction so the cursor is rolled back if enqueueing failed.
 *
 * @see <a href="https://cloud.google.com/dataflow/docs/guides/templates/using-flex-templates">Using
 *     Flex Templates</a>
 */
@Singleton
public class RdePipeline implements Serializable {

  private static final long serialVersionUID = -4866795928854754666L;

  private final transient RdePipelineOptions options;
  private final ValidationMode mode;
  private final ImmutableSet<PendingDeposit> pendingDeposits;
  private final DateTime watermark;
  private final String rdeBucket;
  private final byte[] stagingKeyBytes;
  private final GcsUtils gcsUtils;
  private final CloudTasksUtils cloudTasksUtils;
  private final RdeMarshaller marshaller;

  // Registrars to be excluded from data escrow. Not including the sandbox-only OTE type so that
  // if it sneaks into production we would get an extra signal.
  private static final ImmutableSet<Type> IGNORED_REGISTRAR_TYPES =
      Sets.immutableEnumSet(Registrar.Type.MONITORING, Registrar.Type.TEST);

  private static final FluentLogger logger = FluentLogger.forEnclosingClass();

  @Inject
  RdePipeline(RdePipelineOptions options, GcsUtils gcsUtils, CloudTasksUtils cloudTasksUtils) {
    this.options = options;
    this.mode = ValidationMode.valueOf(options.getValidationMode());
    this.pendingDeposits = decodePendingDeposits(options.getPendings());
    // No .distinct() needed: toImmutableSet() already deduplicates the watermarks.
    ImmutableSet<DateTime> potentialWatermarks =
        pendingDeposits.stream().map(PendingDeposit::watermark).collect(toImmutableSet());
    // Use checkArgument's lazy %s formatting so the message is only rendered on failure,
    // instead of eagerly calling String.format on every construction.
    checkArgument(
        potentialWatermarks.size() == 1,
        "RDE pipeline should only work on pending deposits with the same watermark, "
            + "but %s were given: %s",
        potentialWatermarks.size(),
        potentialWatermarks);
    this.watermark = potentialWatermarks.asList().get(0);
    this.rdeBucket = options.getRdeStagingBucket();
    this.stagingKeyBytes = BaseEncoding.base64Url().decode(options.getStagingKey());
    this.gcsUtils = gcsUtils;
    this.cloudTasksUtils = cloudTasksUtils;
    this.marshaller = new RdeMarshaller(mode);
  }

  /** Builds and runs the pipeline: create all fragments, then persist them per pending deposit. */
  PipelineResult run() {
    Pipeline pipeline = Pipeline.create(options);
    PCollection<KV<PendingDeposit, Iterable<DepositFragment>>> fragments =
        createFragments(pipeline);
    persistData(fragments);
    return pipeline.run();
  }

  /**
   * Creates the (pending deposit: [deposit fragment]) pairs from registrars, domains, contacts,
   * and (external and subordinate) hosts, flattened and grouped by pending deposit.
   */
  PCollection<KV<PendingDeposit, Iterable<DepositFragment>>> createFragments(Pipeline pipeline) {
    PCollection<KV<PendingDeposit, DepositFragment>> registrarFragments =
        processRegistrars(pipeline);
    PCollection<KV<String, Long>> domainHistories =
        getMostRecentHistoryEntries(pipeline, DomainHistory.class);
    PCollection<KV<String, Long>> contactHistories =
        getMostRecentHistoryEntries(pipeline, ContactHistory.class);
    PCollection<KV<String, Long>> hostHistories =
        getMostRecentHistoryEntries(pipeline, HostHistory.class);
    PCollectionTuple processedDomainHistories = processDomainHistories(domainHistories);
    PCollection<KV<PendingDeposit, DepositFragment>> domainFragments =
        processedDomainHistories.get(DOMAIN_FRAGMENTS);
    PCollection<KV<PendingDeposit, DepositFragment>> contactFragments =
        processContactHistories(
            processedDomainHistories.get(REFERENCED_CONTACTS), contactHistories);
    PCollectionTuple processedHosts =
        processHostHistories(processedDomainHistories.get(REFERENCED_HOSTS), hostHistories);
    PCollection<KV<PendingDeposit, DepositFragment>> externalHostFragments =
        processedHosts.get(EXTERNAL_HOST_FRAGMENTS);
    PCollection<KV<PendingDeposit, DepositFragment>> subordinateHostFragments =
        processSubordinateHosts(processedHosts.get(SUPERORDINATE_DOMAINS), domainHistories);
    return PCollectionList.of(registrarFragments)
        .and(domainFragments)
        .and(contactFragments)
        .and(externalHostFragments)
        .and(subordinateHostFragments)
        .apply(
            "Combine PendingDeposit:DepositFragment pairs from all entities",
            Flatten.pCollections())
        .setCoder(KvCoder.of(PendingDepositCoder.of(), SerializableCoder.of(DepositFragment.class)))
        .apply("Group DepositFragment by PendingDeposit", GroupByKey.create());
  }

  /** Writes each pending deposit's fragments to GCS, rolls the cursor, and enqueues uploads. */
  void persistData(PCollection<KV<PendingDeposit, Iterable<DepositFragment>>> input) {
    input.apply(
        "Write to GCS, update cursors, and enqueue upload tasks",
        RdeIO.Write.builder()
            .setRdeBucket(rdeBucket)
            .setGcsUtils(gcsUtils)
            .setCloudTasksUtils(cloudTasksUtils)
            .setValidationMode(mode)
            .setStagingKeyBytes(stagingKeyBytes)
            .build());
  }

  /** Marshals every non-ignored registrar into a fragment for every pending deposit. */
  private PCollection<KV<PendingDeposit, DepositFragment>> processRegistrars(Pipeline pipeline) {
    // Note that the namespace in the metric is not being used by Stackdriver, it just has to be
    // non-empty.
    // See:
    // https://stackoverflow.com/questions/48530496/google-dataflow-custom-metrics-not-showing-on-stackdriver
    Counter includedRegistrarCounter = Metrics.counter("RDE", "IncludedRegistrar");
    Counter registrarFragmentCounter = Metrics.counter("RDE", "RegistrarFragment");
    return pipeline
        .apply(
            "Read all production Registrars",
            RegistryJpaIO.read(
                    "SELECT registrarId FROM Registrar WHERE type NOT IN (:types)",
                    ImmutableMap.of("types", IGNORED_REGISTRAR_TYPES),
                    String.class,
                    x -> x)
                .withCoder(StringUtf8Coder.of()))
        .apply(
            "Marshall Registrar into DepositFragment",
            FlatMapElements.into(
                    kvs(
                        TypeDescriptor.of(PendingDeposit.class),
                        TypeDescriptor.of(DepositFragment.class)))
                .via(
                    (String registrarRepoId) -> {
                      VKey<Registrar> key = VKey.create(Registrar.class, registrarRepoId);
                      includedRegistrarCounter.inc();
                      Registrar registrar = tm().transact(() -> tm().loadByKey(key));
                      DepositFragment fragment = marshaller.marshalRegistrar(registrar);
                      // Registrars are NOT rewound to the watermark; every pending deposit
                      // receives the same current-state fragment.
                      ImmutableSet<KV<PendingDeposit, DepositFragment>> fragments =
                          pendingDeposits.stream()
                              .map(pending -> KV.of(pending, fragment))
                              .collect(toImmutableSet());
                      registrarFragmentCounter.inc(fragments.size());
                      return fragments;
                    }));
  }

  /**
   * Load the most recent history entry before the watermark for a given history entry type.
   *
   * <p>Note that deleted and non-production resources are not included.
   *
   * @return A KV pair of (repoId, revisionId), used to reconstruct the composite key for the
   *     history entry.
   */
  private <T extends HistoryEntry> PCollection<KV<String, Long>> getMostRecentHistoryEntries(
      Pipeline pipeline, Class<T> historyClass) {
    return pipeline.apply(
        String.format("Load most recent %s", historyClass.getSimpleName()),
        RegistryJpaIO.read(
                ("SELECT repoId, revisionId FROM %entity% WHERE (repoId, modificationTime) IN"
                        + " (SELECT repoId, MAX(modificationTime) FROM %entity% WHERE"
                        + " modificationTime <= :watermark GROUP BY repoId) AND resource.deletionTime"
                        + " > :watermark AND COALESCE(resource.creationRegistrarId, '') NOT LIKE"
                        + " 'prober-%' AND COALESCE(resource.currentSponsorRegistrarId, '') NOT LIKE"
                        + " 'prober-%' AND COALESCE(resource.lastEppUpdateRegistrarId, '') NOT LIKE"
                        + " 'prober-%' "
                        // Domains are additionally restricted to real (non-test) TLDs.
                        + (historyClass == DomainHistory.class
                            ? "AND resource.tld IN " + "(SELECT id FROM Tld WHERE tldType = 'REAL')"
                            : ""))
                    .replace("%entity%", historyClass.getSimpleName()),
                ImmutableMap.of("watermark", watermark),
                Object[].class,
                row -> KV.of((String) row[0], (long) row[1]))
            .withCoder(KvCoder.of(StringUtf8Coder.of(), VarLongCoder.of())));
  }

  /**
   * Loads the embedded resource from a history entry, deduplicating spuriously repeated revision
   * IDs first.
   */
  private <T extends HistoryEntry> EppResource loadResourceByHistoryEntryId(
      Class<T> historyEntryClazz, String repoId, Iterable<Long> revisionIds) {
    ImmutableList<Long> ids = ImmutableList.copyOf(revisionIds);
    // The size should always be 1 because we are only getting one repo ID -> revision ID pair per
    // repo ID from the source transform (the JPA query in the method above). But for some reason
    // after CoGroupByKey (joining the revision IDs and the pending deposits on repo IDs), in
    // #removedUnreferencedResources, duplicate revision IDs are sometimes introduced. Here we
    // attempt to deduplicate the iterable. If it contains multiple revision IDs that are NOT the
    // same, we have a more serious problem as we cannot be sure which one to use. We should use the
    // highest revision ID, but we don't even know where it comes from, as the query should
    // definitively only give us one revision ID per repo ID. In this case we have to abort and
    // require manual intervention.
    if (ids.size() != 1) {
      ImmutableSet<Long> dedupedIds = ImmutableSet.copyOf(ids);
      checkState(
          dedupedIds.size() == 1,
          "Multiple unique revision IDs detected for %s repo ID %s: %s",
          historyEntryClazz.getSimpleName(),
          repoId,
          ids);
      // Only reached when the duplicates all share the same revision ID; log for investigation.
      logger.atSevere().log(
          "Duplicate revision IDs detected for %s repo ID %s: %s",
          historyEntryClazz.getSimpleName(), repoId, ids);
    }
    return loadResourceByHistoryEntryId(historyEntryClazz, repoId, ids.get(0));
  }

  /** Loads the embedded resource from one history entry and projects it to the watermark. */
  private <T extends HistoryEntry> EppResource loadResourceByHistoryEntryId(
      Class<T> historyEntryClazz, String repoId, long revisionId) {
    return tm().transact(
            () ->
                tm().loadByKey(
                        VKey.create(historyEntryClazz, new HistoryEntryId(repoId, revisionId))))
        .getResourceAtPointInTime()
        .map(resource -> resource.cloneProjectedAtTime(watermark))
        .get();
  }

  /**
   * Remove unreferenced resources by joining the (repoId, pendingDeposit) pair with the (repoId,
   * revisionId) on the repoId.
   *
   * <p>The (repoId, pendingDeposit) pairs denote resources (contact, host) that are referenced from
   * a domain, that are to be included in the corresponding pending deposit.
   *
   * <p>The (repoId, revisionId) pairs come from the most recent history entry query, which can be
   * used to load the embedded resources themselves.
   *
   * @return a pair of (repoId, ([pendingDeposit], [revisionId])) where neither the pendingDeposit
   *     nor the revisionId list is empty.
   */
  private static PCollection<KV<String, CoGbkResult>> removeUnreferencedResource(
      PCollection<KV<String, PendingDeposit>> referencedResources,
      PCollection<KV<String, Long>> historyEntries,
      Class<? extends EppResource> resourceClazz) {
    String resourceName = resourceClazz.getSimpleName();
    Class<? extends HistoryEntry> historyEntryClazz =
        RESOURCE_TYPES_TO_HISTORY_TYPES.get(resourceClazz);
    String historyEntryName = historyEntryClazz.getSimpleName();
    Counter referencedResourceCounter = Metrics.counter("RDE", "Referenced" + resourceName);
    return KeyedPCollectionTuple.of(PENDING_DEPOSIT, referencedResources)
        .and(REVISION_ID, historyEntries)
        .apply(
            String.format(
                "Join PendingDeposit with %s revision ID on %s", historyEntryName, resourceName),
            CoGroupByKey.create())
        .apply(
            String.format("Remove unreferenced %s", resourceName),
            Filter.by(
                (KV<String, CoGbkResult> kv) -> {
                  boolean toInclude =
                      // If a resource does not have corresponding pending deposit, it is not
                      // referenced and should not be included.
                      kv.getValue().getAll(PENDING_DEPOSIT).iterator().hasNext()
                          // If a resource does not have revision id (this should not happen, as
                          // every referenced resource must be valid at watermark time, therefore
                          // be embedded in a history entry valid at watermark time, otherwise
                          // the domain cannot reference it), there is no way for us to find the
                          // history entry and load the embedded resource. So we ignore the resource
                          // to keep the downstream process simple.
                          && kv.getValue().getAll(REVISION_ID).iterator().hasNext();
                  if (toInclude) {
                    referencedResourceCounter.inc();
                  }
                  return toInclude;
                }));
  }

  /**
   * Marshals each live domain into fragments and emits the contact/host repo IDs it references
   * (RDE mode only) for downstream joins.
   */
  private PCollectionTuple processDomainHistories(PCollection<KV<String, Long>> domainHistories) {
    Counter activeDomainCounter = Metrics.counter("RDE", "ActiveDomainBase");
    Counter domainFragmentCounter = Metrics.counter("RDE", "DomainFragment");
    Counter referencedContactCounter = Metrics.counter("RDE", "ReferencedContact");
    Counter referencedHostCounter = Metrics.counter("RDE", "ReferencedHost");
    return domainHistories.apply(
        "Map DomainHistory to DepositFragment and emit referenced Contact and Host",
        ParDo.of(
                new DoFn<KV<String, Long>, KV<PendingDeposit, DepositFragment>>() {
                  @ProcessElement
                  public void processElement(
                      @Element KV<String, Long> kv, MultiOutputReceiver receiver) {
                    activeDomainCounter.inc();
                    Domain domain =
                        (Domain)
                            loadResourceByHistoryEntryId(
                                DomainHistory.class, kv.getKey(), kv.getValue());
                    pendingDeposits.stream()
                        .filter(pendingDeposit -> pendingDeposit.tld().equals(domain.getTld()))
                        .forEach(
                            pendingDeposit -> {
                              // Domains are always deposited in both modes.
                              domainFragmentCounter.inc();
                              receiver
                                  .get(DOMAIN_FRAGMENTS)
                                  .output(
                                      KV.of(
                                          pendingDeposit,
                                          marshaller.marshalDomain(domain, pendingDeposit.mode())));
                              // Contacts and hosts are only deposited in RDE, not BRDA.
                              if (pendingDeposit.mode() == RdeMode.FULL) {
                                HashSet<Serializable> contacts = new HashSet<>();
                                domain.getAdminContact().ifPresent(c -> contacts.add(c.getKey()));
                                domain.getTechContact().ifPresent(c -> contacts.add(c.getKey()));
                                domain.getRegistrant().ifPresent(c -> contacts.add(c.getKey()));
                                domain.getBillingContact().ifPresent(c -> contacts.add(c.getKey()));
                                referencedContactCounter.inc(contacts.size());
                                contacts.forEach(
                                    contactRepoId ->
                                        receiver
                                            .get(REFERENCED_CONTACTS)
                                            .output(
                                                KV.of((String) contactRepoId, pendingDeposit)));
                                if (domain.getNsHosts() != null) {
                                  referencedHostCounter.inc(domain.getNsHosts().size());
                                  domain
                                      .getNsHosts()
                                      .forEach(
                                          hostKey ->
                                              receiver
                                                  .get(REFERENCED_HOSTS)
                                                  .output(
                                                      KV.of(
                                                          (String) hostKey.getKey(),
                                                          pendingDeposit)));
                                }
                              }
                            });
                  }
                })
            .withOutputTags(
                DOMAIN_FRAGMENTS, TupleTagList.of(REFERENCED_CONTACTS).and(REFERENCED_HOSTS)));
  }

  /** Marshals each referenced contact into a fragment for every pending deposit referencing it. */
  private PCollection<KV<PendingDeposit, DepositFragment>> processContactHistories(
      PCollection<KV<String, PendingDeposit>> referencedContacts,
      PCollection<KV<String, Long>> contactHistories) {
    Counter contactFragmentCounter = Metrics.counter("RDE", "ContactFragment");
    return removeUnreferencedResource(referencedContacts, contactHistories, Contact.class)
        .apply(
            "Map Contact to DepositFragment",
            FlatMapElements.into(
                    kvs(
                        TypeDescriptor.of(PendingDeposit.class),
                        TypeDescriptor.of(DepositFragment.class)))
                .via(
                    (KV<String, CoGbkResult> kv) -> {
                      Contact contact =
                          (Contact)
                              loadResourceByHistoryEntryId(
                                  ContactHistory.class,
                                  kv.getKey(),
                                  kv.getValue().getAll(REVISION_ID));
                      DepositFragment fragment = marshaller.marshalContact(contact);
                      ImmutableSet<KV<PendingDeposit, DepositFragment>> fragments =
                          Streams.stream(kv.getValue().getAll(PENDING_DEPOSIT))
                              // The same contact could be used by multiple domains, therefore
                              // matched to the same pending deposit multiple times.
                              .distinct()
                              .map(pendingDeposit -> KV.of(pendingDeposit, fragment))
                              .collect(toImmutableSet());
                      contactFragmentCounter.inc(fragments.size());
                      return fragments;
                    }));
  }

  /**
   * Marshals external hosts into fragments directly; routes subordinate hosts (keyed by their
   * superordinate domain) to {@link #processSubordinateHosts}.
   */
  private PCollectionTuple processHostHistories(
      PCollection<KV<String, PendingDeposit>> referencedHosts,
      PCollection<KV<String, Long>> hostHistories) {
    Counter subordinateHostCounter = Metrics.counter("RDE", "SubordinateHost");
    Counter externalHostCounter = Metrics.counter("RDE", "ExternalHost");
    Counter externalHostFragmentCounter = Metrics.counter("RDE", "ExternalHostFragment");
    return removeUnreferencedResource(referencedHosts, hostHistories, Host.class)
        .apply(
            "Map external DomainResource to DepositFragment and process subordinate domains",
            ParDo.of(
                    new DoFn<KV<String, CoGbkResult>, KV<PendingDeposit, DepositFragment>>() {
                      @ProcessElement
                      public void processElement(
                          @Element KV<String, CoGbkResult> kv, MultiOutputReceiver receiver) {
                        Host host =
                            (Host)
                                loadResourceByHistoryEntryId(
                                    HostHistory.class,
                                    kv.getKey(),
                                    kv.getValue().getAll(REVISION_ID));
                        // When a host is subordinate, we need to find its superordinate domain and
                        // include it in the deposit as well.
                        if (host.isSubordinate()) {
                          subordinateHostCounter.inc();
                          receiver
                              .get(SUPERORDINATE_DOMAINS)
                              .output(
                                  // The output are pairs of
                                  // (superordinateDomainRepoId,
                                  // (subordinateHostRepoId, (pendingDeposit, revisionId))).
                                  KV.of((String) host.getSuperordinateDomain().getKey(), kv));
                        } else {
                          externalHostCounter.inc();
                          DepositFragment fragment = marshaller.marshalExternalHost(host);
                          Streams.stream(kv.getValue().getAll(PENDING_DEPOSIT))
                              // The same host could be used by multiple domains, therefore
                              // matched to the same pending deposit multiple times.
                              .distinct()
                              .forEach(
                                  pendingDeposit -> {
                                    externalHostFragmentCounter.inc();
                                    receiver
                                        .get(EXTERNAL_HOST_FRAGMENTS)
                                        .output(KV.of(pendingDeposit, fragment));
                                  });
                        }
                      }
                    })
                .withOutputTags(EXTERNAL_HOST_FRAGMENTS, TupleTagList.of(SUPERORDINATE_DOMAINS)));
  }

  /**
   * Process subordinate hosts by making a deposit fragment with pending transfer information
   * obtained from its superordinate domain.
   *
   * @param superordinateDomains Pairs of (superordinateDomainRepoId, (subordinateHostRepoId,
   *     (pendingDeposit, revisionId))). This collection maps the subordinate host and the pending
   *     deposit to include it to its superordinate domain.
   * @param domainHistories Pairs of (domainRepoId, revisionId). This collection helps us find the
   *     historical superordinate domain from its history entry and is obtained from calling {@link
   *     #getMostRecentHistoryEntries} for domains.
   */
  private PCollection<KV<PendingDeposit, DepositFragment>> processSubordinateHosts(
      PCollection<KV<String, KV<String, CoGbkResult>>> superordinateDomains,
      PCollection<KV<String, Long>> domainHistories) {
    Counter subordinateHostFragmentCounter = Metrics.counter("RDE", "SubordinateHostFragment");
    Counter referencedSubordinateHostCounter = Metrics.counter("RDE", "ReferencedSubordinateHost");
    return KeyedPCollectionTuple.of(HOST_TO_PENDING_DEPOSIT, superordinateDomains)
        .and(REVISION_ID, domainHistories)
        .apply("Join Host:PendingDeposits with DomainHistory on Domain", CoGroupByKey.create())
        .apply(
            "Remove unreferenced Domain",
            Filter.by(
                kv -> {
                  boolean toInclude =
                      kv.getValue().getAll(HOST_TO_PENDING_DEPOSIT).iterator().hasNext()
                          && kv.getValue().getAll(REVISION_ID).iterator().hasNext();
                  if (toInclude) {
                    referencedSubordinateHostCounter.inc();
                  }
                  return toInclude;
                }))
        .apply(
            "Map subordinate Host to DepositFragment",
            FlatMapElements.into(
                    kvs(
                        TypeDescriptor.of(PendingDeposit.class),
                        TypeDescriptor.of(DepositFragment.class)))
                .via(
                    (KV<String, CoGbkResult> kv) -> {
                      Domain superordinateDomain =
                          (Domain)
                              loadResourceByHistoryEntryId(
                                  DomainHistory.class,
                                  kv.getKey(),
                                  kv.getValue().getAll(REVISION_ID));
                      ImmutableSet.Builder<KV<PendingDeposit, DepositFragment>> results =
                          new ImmutableSet.Builder<>();
                      for (KV<String, CoGbkResult> hostToPendingDeposits :
                          kv.getValue().getAll(HOST_TO_PENDING_DEPOSIT)) {
                        Host host =
                            (Host)
                                loadResourceByHistoryEntryId(
                                    HostHistory.class,
                                    hostToPendingDeposits.getKey(),
                                    hostToPendingDeposits.getValue().getAll(REVISION_ID));
                        DepositFragment fragment =
                            marshaller.marshalSubordinateHost(host, superordinateDomain);
                        Streams.stream(hostToPendingDeposits.getValue().getAll(PENDING_DEPOSIT))
                            .distinct()
                            .forEach(
                                pendingDeposit -> {
                                  subordinateHostFragmentCounter.inc();
                                  results.add(KV.of(pendingDeposit, fragment));
                                });
                      }
                      return results.build();
                    }));
  }

  /**
   * Decodes the pipeline option extracted from the URL parameter sent by the pipeline launcher to
   * the original pending deposit set.
   */
  @SuppressWarnings("unchecked")
  static ImmutableSet<PendingDeposit> decodePendingDeposits(String encodedPendingDeposits) {
    return ImmutableSet.copyOf(
        safeDeserializeCollection(PendingDeposit.class, decodeBase64(encodedPendingDeposits)));
  }

  /**
   * Encodes the pending deposit set in a URL safe string that is sent to the pipeline worker by the
   * pipeline launcher as a pipeline option.
   */
  public static String encodePendingDeposits(ImmutableSet<PendingDeposit> pendingDeposits)
      throws IOException {
    return encodeBase64(serializeCollection(pendingDeposits));
  }

  public static void main(String[] args) throws IOException, ClassNotFoundException {
    PipelineOptionsFactory.register(RdePipelineOptions.class);
    RdePipelineOptions options =
        PipelineOptionsFactory.fromArgs(args).withValidation().as(RdePipelineOptions.class);
    RegistryPipelineOptions.validateRegistryPipelineOptions(options);
    options.setIsolationOverride(TransactionIsolationLevel.TRANSACTION_READ_COMMITTED);
    DaggerRdePipeline_RdePipelineComponent.builder().options(options).build().rdePipeline().run();
  }

  /**
   * A utility class that contains {@link TupleTag}s when {@link PCollectionTuple}s and {@link
   * CoGbkResult}s are used.
   */
  protected abstract static class TupleTags {

    protected static final TupleTag<KV<PendingDeposit, DepositFragment>> DOMAIN_FRAGMENTS =
        new TupleTag<>() {};

    protected static final TupleTag<KV<String, PendingDeposit>> REFERENCED_CONTACTS =
        new TupleTag<>() {};

    protected static final TupleTag<KV<String, PendingDeposit>> REFERENCED_HOSTS =
        new TupleTag<>() {};

    protected static final TupleTag<KV<String, KV<String, CoGbkResult>>> SUPERORDINATE_DOMAINS =
        new TupleTag<>() {};

    protected static final TupleTag<KV<PendingDeposit, DepositFragment>> EXTERNAL_HOST_FRAGMENTS =
        new TupleTag<>() {};

    protected static final TupleTag<PendingDeposit> PENDING_DEPOSIT = new TupleTag<>() {};

    protected static final TupleTag<KV<String, CoGbkResult>> HOST_TO_PENDING_DEPOSIT =
        new TupleTag<>() {};

    protected static final TupleTag<Long> REVISION_ID = new TupleTag<>() {};
  }

  @Singleton
  @Component(
      modules = {
        CredentialModule.class,
        ConfigModule.class,
        CloudTasksUtilsModule.class,
        UtilsModule.class
      })
  interface RdePipelineComponent {
    RdePipeline rdePipeline();

    @Component.Builder
    interface Builder {
      @BindsInstance
      Builder options(RdePipelineOptions options);

      RdePipelineComponent build();
    }
  }
}
googleapis/google-cloud-java
36,611
java-datastream/proto-google-cloud-datastream-v1/src/main/java/com/google/cloud/datastream/v1/SqlServerTable.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/datastream/v1/datastream_resources.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.datastream.v1; /** * * * <pre> * SQLServer table. * </pre> * * Protobuf type {@code google.cloud.datastream.v1.SqlServerTable} */ public final class SqlServerTable extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.datastream.v1.SqlServerTable) SqlServerTableOrBuilder { private static final long serialVersionUID = 0L; // Use SqlServerTable.newBuilder() to construct. 
private SqlServerTable(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private SqlServerTable() { table_ = ""; columns_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new SqlServerTable(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.datastream.v1.DatastreamResourcesProto .internal_static_google_cloud_datastream_v1_SqlServerTable_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.datastream.v1.DatastreamResourcesProto .internal_static_google_cloud_datastream_v1_SqlServerTable_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.datastream.v1.SqlServerTable.class, com.google.cloud.datastream.v1.SqlServerTable.Builder.class); } public static final int TABLE_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object table_ = ""; /** * * * <pre> * Table name. * </pre> * * <code>string table = 1;</code> * * @return The table. */ @java.lang.Override public java.lang.String getTable() { java.lang.Object ref = table_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); table_ = s; return s; } } /** * * * <pre> * Table name. * </pre> * * <code>string table = 1;</code> * * @return The bytes for table. 
*/ @java.lang.Override public com.google.protobuf.ByteString getTableBytes() { java.lang.Object ref = table_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); table_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int COLUMNS_FIELD_NUMBER = 2; @SuppressWarnings("serial") private java.util.List<com.google.cloud.datastream.v1.SqlServerColumn> columns_; /** * * * <pre> * SQLServer columns in the schema. * When unspecified as part of include/exclude objects, * includes/excludes everything. * </pre> * * <code>repeated .google.cloud.datastream.v1.SqlServerColumn columns = 2;</code> */ @java.lang.Override public java.util.List<com.google.cloud.datastream.v1.SqlServerColumn> getColumnsList() { return columns_; } /** * * * <pre> * SQLServer columns in the schema. * When unspecified as part of include/exclude objects, * includes/excludes everything. * </pre> * * <code>repeated .google.cloud.datastream.v1.SqlServerColumn columns = 2;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.datastream.v1.SqlServerColumnOrBuilder> getColumnsOrBuilderList() { return columns_; } /** * * * <pre> * SQLServer columns in the schema. * When unspecified as part of include/exclude objects, * includes/excludes everything. * </pre> * * <code>repeated .google.cloud.datastream.v1.SqlServerColumn columns = 2;</code> */ @java.lang.Override public int getColumnsCount() { return columns_.size(); } /** * * * <pre> * SQLServer columns in the schema. * When unspecified as part of include/exclude objects, * includes/excludes everything. * </pre> * * <code>repeated .google.cloud.datastream.v1.SqlServerColumn columns = 2;</code> */ @java.lang.Override public com.google.cloud.datastream.v1.SqlServerColumn getColumns(int index) { return columns_.get(index); } /** * * * <pre> * SQLServer columns in the schema. 
* When unspecified as part of include/exclude objects, * includes/excludes everything. * </pre> * * <code>repeated .google.cloud.datastream.v1.SqlServerColumn columns = 2;</code> */ @java.lang.Override public com.google.cloud.datastream.v1.SqlServerColumnOrBuilder getColumnsOrBuilder(int index) { return columns_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(table_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, table_); } for (int i = 0; i < columns_.size(); i++) { output.writeMessage(2, columns_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(table_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, table_); } for (int i = 0; i < columns_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, columns_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.datastream.v1.SqlServerTable)) { return super.equals(obj); } com.google.cloud.datastream.v1.SqlServerTable other = (com.google.cloud.datastream.v1.SqlServerTable) obj; if (!getTable().equals(other.getTable())) return false; if (!getColumnsList().equals(other.getColumnsList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override 
public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + TABLE_FIELD_NUMBER; hash = (53 * hash) + getTable().hashCode(); if (getColumnsCount() > 0) { hash = (37 * hash) + COLUMNS_FIELD_NUMBER; hash = (53 * hash) + getColumnsList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.datastream.v1.SqlServerTable parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datastream.v1.SqlServerTable parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datastream.v1.SqlServerTable parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datastream.v1.SqlServerTable parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datastream.v1.SqlServerTable parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datastream.v1.SqlServerTable parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datastream.v1.SqlServerTable parseFrom(java.io.InputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.datastream.v1.SqlServerTable parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.datastream.v1.SqlServerTable parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.datastream.v1.SqlServerTable parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.datastream.v1.SqlServerTable parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.datastream.v1.SqlServerTable parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.datastream.v1.SqlServerTable prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * SQLServer table. * </pre> * * Protobuf type {@code google.cloud.datastream.v1.SqlServerTable} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.datastream.v1.SqlServerTable) com.google.cloud.datastream.v1.SqlServerTableOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.datastream.v1.DatastreamResourcesProto .internal_static_google_cloud_datastream_v1_SqlServerTable_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.datastream.v1.DatastreamResourcesProto .internal_static_google_cloud_datastream_v1_SqlServerTable_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.datastream.v1.SqlServerTable.class, com.google.cloud.datastream.v1.SqlServerTable.Builder.class); } // Construct using com.google.cloud.datastream.v1.SqlServerTable.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; table_ = ""; if (columnsBuilder_ == null) { columns_ = java.util.Collections.emptyList(); } else { columns_ = null; columnsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.datastream.v1.DatastreamResourcesProto .internal_static_google_cloud_datastream_v1_SqlServerTable_descriptor; } @java.lang.Override public 
com.google.cloud.datastream.v1.SqlServerTable getDefaultInstanceForType() { return com.google.cloud.datastream.v1.SqlServerTable.getDefaultInstance(); } @java.lang.Override public com.google.cloud.datastream.v1.SqlServerTable build() { com.google.cloud.datastream.v1.SqlServerTable result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.datastream.v1.SqlServerTable buildPartial() { com.google.cloud.datastream.v1.SqlServerTable result = new com.google.cloud.datastream.v1.SqlServerTable(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(com.google.cloud.datastream.v1.SqlServerTable result) { if (columnsBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0)) { columns_ = java.util.Collections.unmodifiableList(columns_); bitField0_ = (bitField0_ & ~0x00000002); } result.columns_ = columns_; } else { result.columns_ = columnsBuilder_.build(); } } private void buildPartial0(com.google.cloud.datastream.v1.SqlServerTable result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.table_ = table_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public 
Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.datastream.v1.SqlServerTable) { return mergeFrom((com.google.cloud.datastream.v1.SqlServerTable) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.datastream.v1.SqlServerTable other) { if (other == com.google.cloud.datastream.v1.SqlServerTable.getDefaultInstance()) return this; if (!other.getTable().isEmpty()) { table_ = other.table_; bitField0_ |= 0x00000001; onChanged(); } if (columnsBuilder_ == null) { if (!other.columns_.isEmpty()) { if (columns_.isEmpty()) { columns_ = other.columns_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureColumnsIsMutable(); columns_.addAll(other.columns_); } onChanged(); } } else { if (!other.columns_.isEmpty()) { if (columnsBuilder_.isEmpty()) { columnsBuilder_.dispose(); columnsBuilder_ = null; columns_ = other.columns_; bitField0_ = (bitField0_ & ~0x00000002); columnsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getColumnsFieldBuilder() : null; } else { columnsBuilder_.addAllMessages(other.columns_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { table_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { com.google.cloud.datastream.v1.SqlServerColumn m = input.readMessage( com.google.cloud.datastream.v1.SqlServerColumn.parser(), extensionRegistry); if (columnsBuilder_ == null) { ensureColumnsIsMutable(); columns_.add(m); } else { columnsBuilder_.addMessage(m); } break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object table_ = ""; /** * * * <pre> * Table name. * </pre> * * <code>string table = 1;</code> * * @return The table. */ public java.lang.String getTable() { java.lang.Object ref = table_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); table_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Table name. * </pre> * * <code>string table = 1;</code> * * @return The bytes for table. 
*/ public com.google.protobuf.ByteString getTableBytes() { java.lang.Object ref = table_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); table_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Table name. * </pre> * * <code>string table = 1;</code> * * @param value The table to set. * @return This builder for chaining. */ public Builder setTable(java.lang.String value) { if (value == null) { throw new NullPointerException(); } table_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Table name. * </pre> * * <code>string table = 1;</code> * * @return This builder for chaining. */ public Builder clearTable() { table_ = getDefaultInstance().getTable(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Table name. * </pre> * * <code>string table = 1;</code> * * @param value The bytes for table to set. * @return This builder for chaining. */ public Builder setTableBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); table_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.util.List<com.google.cloud.datastream.v1.SqlServerColumn> columns_ = java.util.Collections.emptyList(); private void ensureColumnsIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { columns_ = new java.util.ArrayList<com.google.cloud.datastream.v1.SqlServerColumn>(columns_); bitField0_ |= 0x00000002; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.datastream.v1.SqlServerColumn, com.google.cloud.datastream.v1.SqlServerColumn.Builder, com.google.cloud.datastream.v1.SqlServerColumnOrBuilder> columnsBuilder_; /** * * * <pre> * SQLServer columns in the schema. * When unspecified as part of include/exclude objects, * includes/excludes everything. 
* </pre> * * <code>repeated .google.cloud.datastream.v1.SqlServerColumn columns = 2;</code> */ public java.util.List<com.google.cloud.datastream.v1.SqlServerColumn> getColumnsList() { if (columnsBuilder_ == null) { return java.util.Collections.unmodifiableList(columns_); } else { return columnsBuilder_.getMessageList(); } } /** * * * <pre> * SQLServer columns in the schema. * When unspecified as part of include/exclude objects, * includes/excludes everything. * </pre> * * <code>repeated .google.cloud.datastream.v1.SqlServerColumn columns = 2;</code> */ public int getColumnsCount() { if (columnsBuilder_ == null) { return columns_.size(); } else { return columnsBuilder_.getCount(); } } /** * * * <pre> * SQLServer columns in the schema. * When unspecified as part of include/exclude objects, * includes/excludes everything. * </pre> * * <code>repeated .google.cloud.datastream.v1.SqlServerColumn columns = 2;</code> */ public com.google.cloud.datastream.v1.SqlServerColumn getColumns(int index) { if (columnsBuilder_ == null) { return columns_.get(index); } else { return columnsBuilder_.getMessage(index); } } /** * * * <pre> * SQLServer columns in the schema. * When unspecified as part of include/exclude objects, * includes/excludes everything. * </pre> * * <code>repeated .google.cloud.datastream.v1.SqlServerColumn columns = 2;</code> */ public Builder setColumns(int index, com.google.cloud.datastream.v1.SqlServerColumn value) { if (columnsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureColumnsIsMutable(); columns_.set(index, value); onChanged(); } else { columnsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * SQLServer columns in the schema. * When unspecified as part of include/exclude objects, * includes/excludes everything. 
* </pre> * * <code>repeated .google.cloud.datastream.v1.SqlServerColumn columns = 2;</code> */ public Builder setColumns( int index, com.google.cloud.datastream.v1.SqlServerColumn.Builder builderForValue) { if (columnsBuilder_ == null) { ensureColumnsIsMutable(); columns_.set(index, builderForValue.build()); onChanged(); } else { columnsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * SQLServer columns in the schema. * When unspecified as part of include/exclude objects, * includes/excludes everything. * </pre> * * <code>repeated .google.cloud.datastream.v1.SqlServerColumn columns = 2;</code> */ public Builder addColumns(com.google.cloud.datastream.v1.SqlServerColumn value) { if (columnsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureColumnsIsMutable(); columns_.add(value); onChanged(); } else { columnsBuilder_.addMessage(value); } return this; } /** * * * <pre> * SQLServer columns in the schema. * When unspecified as part of include/exclude objects, * includes/excludes everything. * </pre> * * <code>repeated .google.cloud.datastream.v1.SqlServerColumn columns = 2;</code> */ public Builder addColumns(int index, com.google.cloud.datastream.v1.SqlServerColumn value) { if (columnsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureColumnsIsMutable(); columns_.add(index, value); onChanged(); } else { columnsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * SQLServer columns in the schema. * When unspecified as part of include/exclude objects, * includes/excludes everything. 
* </pre> * * <code>repeated .google.cloud.datastream.v1.SqlServerColumn columns = 2;</code> */ public Builder addColumns( com.google.cloud.datastream.v1.SqlServerColumn.Builder builderForValue) { if (columnsBuilder_ == null) { ensureColumnsIsMutable(); columns_.add(builderForValue.build()); onChanged(); } else { columnsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * SQLServer columns in the schema. * When unspecified as part of include/exclude objects, * includes/excludes everything. * </pre> * * <code>repeated .google.cloud.datastream.v1.SqlServerColumn columns = 2;</code> */ public Builder addColumns( int index, com.google.cloud.datastream.v1.SqlServerColumn.Builder builderForValue) { if (columnsBuilder_ == null) { ensureColumnsIsMutable(); columns_.add(index, builderForValue.build()); onChanged(); } else { columnsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * SQLServer columns in the schema. * When unspecified as part of include/exclude objects, * includes/excludes everything. * </pre> * * <code>repeated .google.cloud.datastream.v1.SqlServerColumn columns = 2;</code> */ public Builder addAllColumns( java.lang.Iterable<? extends com.google.cloud.datastream.v1.SqlServerColumn> values) { if (columnsBuilder_ == null) { ensureColumnsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, columns_); onChanged(); } else { columnsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * SQLServer columns in the schema. * When unspecified as part of include/exclude objects, * includes/excludes everything. * </pre> * * <code>repeated .google.cloud.datastream.v1.SqlServerColumn columns = 2;</code> */ public Builder clearColumns() { if (columnsBuilder_ == null) { columns_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { columnsBuilder_.clear(); } return this; } /** * * * <pre> * SQLServer columns in the schema. 
* When unspecified as part of include/exclude objects, * includes/excludes everything. * </pre> * * <code>repeated .google.cloud.datastream.v1.SqlServerColumn columns = 2;</code> */ public Builder removeColumns(int index) { if (columnsBuilder_ == null) { ensureColumnsIsMutable(); columns_.remove(index); onChanged(); } else { columnsBuilder_.remove(index); } return this; } /** * * * <pre> * SQLServer columns in the schema. * When unspecified as part of include/exclude objects, * includes/excludes everything. * </pre> * * <code>repeated .google.cloud.datastream.v1.SqlServerColumn columns = 2;</code> */ public com.google.cloud.datastream.v1.SqlServerColumn.Builder getColumnsBuilder(int index) { return getColumnsFieldBuilder().getBuilder(index); } /** * * * <pre> * SQLServer columns in the schema. * When unspecified as part of include/exclude objects, * includes/excludes everything. * </pre> * * <code>repeated .google.cloud.datastream.v1.SqlServerColumn columns = 2;</code> */ public com.google.cloud.datastream.v1.SqlServerColumnOrBuilder getColumnsOrBuilder(int index) { if (columnsBuilder_ == null) { return columns_.get(index); } else { return columnsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * SQLServer columns in the schema. * When unspecified as part of include/exclude objects, * includes/excludes everything. * </pre> * * <code>repeated .google.cloud.datastream.v1.SqlServerColumn columns = 2;</code> */ public java.util.List<? extends com.google.cloud.datastream.v1.SqlServerColumnOrBuilder> getColumnsOrBuilderList() { if (columnsBuilder_ != null) { return columnsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(columns_); } } /** * * * <pre> * SQLServer columns in the schema. * When unspecified as part of include/exclude objects, * includes/excludes everything. 
* </pre> * * <code>repeated .google.cloud.datastream.v1.SqlServerColumn columns = 2;</code> */ public com.google.cloud.datastream.v1.SqlServerColumn.Builder addColumnsBuilder() { return getColumnsFieldBuilder() .addBuilder(com.google.cloud.datastream.v1.SqlServerColumn.getDefaultInstance()); } /** * * * <pre> * SQLServer columns in the schema. * When unspecified as part of include/exclude objects, * includes/excludes everything. * </pre> * * <code>repeated .google.cloud.datastream.v1.SqlServerColumn columns = 2;</code> */ public com.google.cloud.datastream.v1.SqlServerColumn.Builder addColumnsBuilder(int index) { return getColumnsFieldBuilder() .addBuilder(index, com.google.cloud.datastream.v1.SqlServerColumn.getDefaultInstance()); } /** * * * <pre> * SQLServer columns in the schema. * When unspecified as part of include/exclude objects, * includes/excludes everything. * </pre> * * <code>repeated .google.cloud.datastream.v1.SqlServerColumn columns = 2;</code> */ public java.util.List<com.google.cloud.datastream.v1.SqlServerColumn.Builder> getColumnsBuilderList() { return getColumnsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.datastream.v1.SqlServerColumn, com.google.cloud.datastream.v1.SqlServerColumn.Builder, com.google.cloud.datastream.v1.SqlServerColumnOrBuilder> getColumnsFieldBuilder() { if (columnsBuilder_ == null) { columnsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.datastream.v1.SqlServerColumn, com.google.cloud.datastream.v1.SqlServerColumn.Builder, com.google.cloud.datastream.v1.SqlServerColumnOrBuilder>( columns_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean()); columns_ = null; } return columnsBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final 
com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.datastream.v1.SqlServerTable) } // @@protoc_insertion_point(class_scope:google.cloud.datastream.v1.SqlServerTable) private static final com.google.cloud.datastream.v1.SqlServerTable DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.datastream.v1.SqlServerTable(); } public static com.google.cloud.datastream.v1.SqlServerTable getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<SqlServerTable> PARSER = new com.google.protobuf.AbstractParser<SqlServerTable>() { @java.lang.Override public SqlServerTable parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<SqlServerTable> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<SqlServerTable> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.datastream.v1.SqlServerTable getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,637
java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListDatasetsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1beta1/dataset_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1beta1; /** * * * <pre> * Response message for * [DatasetService.ListDatasets][google.cloud.aiplatform.v1beta1.DatasetService.ListDatasets]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.ListDatasetsResponse} */ public final class ListDatasetsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.ListDatasetsResponse) ListDatasetsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListDatasetsResponse.newBuilder() to construct. 
private ListDatasetsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListDatasetsResponse() { datasets_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListDatasetsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.DatasetServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ListDatasetsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.DatasetServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ListDatasetsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse.class, com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse.Builder.class); } public static final int DATASETS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.aiplatform.v1beta1.Dataset> datasets_; /** * * * <pre> * A list of Datasets that matches the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Dataset datasets = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.aiplatform.v1beta1.Dataset> getDatasetsList() { return datasets_; } /** * * * <pre> * A list of Datasets that matches the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Dataset datasets = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.aiplatform.v1beta1.DatasetOrBuilder> getDatasetsOrBuilderList() { return datasets_; } /** * * * <pre> * A list of Datasets that matches the specified filter in the request. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Dataset datasets = 1;</code> */ @java.lang.Override public int getDatasetsCount() { return datasets_.size(); } /** * * * <pre> * A list of Datasets that matches the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Dataset datasets = 1;</code> */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.Dataset getDatasets(int index) { return datasets_.get(index); } /** * * * <pre> * A list of Datasets that matches the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Dataset datasets = 1;</code> */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.DatasetOrBuilder getDatasetsOrBuilder(int index) { return datasets_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * The standard List next-page token. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * The standard List next-page token. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < datasets_.size(); i++) { output.writeMessage(1, datasets_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < datasets_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, datasets_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse)) { return super.equals(obj); } com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse other = (com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse) obj; if (!getDatasetsList().equals(other.getDatasetsList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return 
false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getDatasetsCount() > 0) { hash = (37 * hash) + DATASETS_FIELD_NUMBER; hash = (53 * hash) + getDatasetsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } 
@java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for * [DatasetService.ListDatasets][google.cloud.aiplatform.v1beta1.DatasetService.ListDatasets]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.ListDatasetsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.ListDatasetsResponse) com.google.cloud.aiplatform.v1beta1.ListDatasetsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.DatasetServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ListDatasetsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.DatasetServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ListDatasetsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse.class, com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse.Builder.class); } // Construct using com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (datasetsBuilder_ == null) { datasets_ = java.util.Collections.emptyList(); } else { datasets_ = null; datasetsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override 
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1beta1.DatasetServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ListDatasetsResponse_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse build() { com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse buildPartial() { com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse result = new com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse result) { if (datasetsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { datasets_ = java.util.Collections.unmodifiableList(datasets_); bitField0_ = (bitField0_ & ~0x00000001); } result.datasets_ = datasets_; } else { result.datasets_ = datasetsBuilder_.build(); } } private void buildPartial0(com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return 
super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse) { return mergeFrom((com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse other) { if (other == com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse.getDefaultInstance()) return this; if (datasetsBuilder_ == null) { if (!other.datasets_.isEmpty()) { if (datasets_.isEmpty()) { datasets_ = other.datasets_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureDatasetsIsMutable(); datasets_.addAll(other.datasets_); } onChanged(); } } else { if (!other.datasets_.isEmpty()) { if (datasetsBuilder_.isEmpty()) { datasetsBuilder_.dispose(); datasetsBuilder_ = null; datasets_ = other.datasets_; bitField0_ = (bitField0_ & ~0x00000001); datasetsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getDatasetsFieldBuilder() : null; } else { datasetsBuilder_.addAllMessages(other.datasets_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.aiplatform.v1beta1.Dataset m = input.readMessage( com.google.cloud.aiplatform.v1beta1.Dataset.parser(), extensionRegistry); if (datasetsBuilder_ == null) { ensureDatasetsIsMutable(); datasets_.add(m); } else { datasetsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.aiplatform.v1beta1.Dataset> datasets_ = java.util.Collections.emptyList(); private void ensureDatasetsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { datasets_ = new java.util.ArrayList<com.google.cloud.aiplatform.v1beta1.Dataset>(datasets_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.Dataset, com.google.cloud.aiplatform.v1beta1.Dataset.Builder, 
com.google.cloud.aiplatform.v1beta1.DatasetOrBuilder> datasetsBuilder_; /** * * * <pre> * A list of Datasets that matches the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Dataset datasets = 1;</code> */ public java.util.List<com.google.cloud.aiplatform.v1beta1.Dataset> getDatasetsList() { if (datasetsBuilder_ == null) { return java.util.Collections.unmodifiableList(datasets_); } else { return datasetsBuilder_.getMessageList(); } } /** * * * <pre> * A list of Datasets that matches the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Dataset datasets = 1;</code> */ public int getDatasetsCount() { if (datasetsBuilder_ == null) { return datasets_.size(); } else { return datasetsBuilder_.getCount(); } } /** * * * <pre> * A list of Datasets that matches the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Dataset datasets = 1;</code> */ public com.google.cloud.aiplatform.v1beta1.Dataset getDatasets(int index) { if (datasetsBuilder_ == null) { return datasets_.get(index); } else { return datasetsBuilder_.getMessage(index); } } /** * * * <pre> * A list of Datasets that matches the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Dataset datasets = 1;</code> */ public Builder setDatasets(int index, com.google.cloud.aiplatform.v1beta1.Dataset value) { if (datasetsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureDatasetsIsMutable(); datasets_.set(index, value); onChanged(); } else { datasetsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * A list of Datasets that matches the specified filter in the request. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Dataset datasets = 1;</code> */ public Builder setDatasets( int index, com.google.cloud.aiplatform.v1beta1.Dataset.Builder builderForValue) { if (datasetsBuilder_ == null) { ensureDatasetsIsMutable(); datasets_.set(index, builderForValue.build()); onChanged(); } else { datasetsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * A list of Datasets that matches the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Dataset datasets = 1;</code> */ public Builder addDatasets(com.google.cloud.aiplatform.v1beta1.Dataset value) { if (datasetsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureDatasetsIsMutable(); datasets_.add(value); onChanged(); } else { datasetsBuilder_.addMessage(value); } return this; } /** * * * <pre> * A list of Datasets that matches the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Dataset datasets = 1;</code> */ public Builder addDatasets(int index, com.google.cloud.aiplatform.v1beta1.Dataset value) { if (datasetsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureDatasetsIsMutable(); datasets_.add(index, value); onChanged(); } else { datasetsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * A list of Datasets that matches the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Dataset datasets = 1;</code> */ public Builder addDatasets( com.google.cloud.aiplatform.v1beta1.Dataset.Builder builderForValue) { if (datasetsBuilder_ == null) { ensureDatasetsIsMutable(); datasets_.add(builderForValue.build()); onChanged(); } else { datasetsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * A list of Datasets that matches the specified filter in the request. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Dataset datasets = 1;</code> */ public Builder addDatasets( int index, com.google.cloud.aiplatform.v1beta1.Dataset.Builder builderForValue) { if (datasetsBuilder_ == null) { ensureDatasetsIsMutable(); datasets_.add(index, builderForValue.build()); onChanged(); } else { datasetsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * A list of Datasets that matches the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Dataset datasets = 1;</code> */ public Builder addAllDatasets( java.lang.Iterable<? extends com.google.cloud.aiplatform.v1beta1.Dataset> values) { if (datasetsBuilder_ == null) { ensureDatasetsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, datasets_); onChanged(); } else { datasetsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * A list of Datasets that matches the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Dataset datasets = 1;</code> */ public Builder clearDatasets() { if (datasetsBuilder_ == null) { datasets_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { datasetsBuilder_.clear(); } return this; } /** * * * <pre> * A list of Datasets that matches the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Dataset datasets = 1;</code> */ public Builder removeDatasets(int index) { if (datasetsBuilder_ == null) { ensureDatasetsIsMutable(); datasets_.remove(index); onChanged(); } else { datasetsBuilder_.remove(index); } return this; } /** * * * <pre> * A list of Datasets that matches the specified filter in the request. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Dataset datasets = 1;</code> */ public com.google.cloud.aiplatform.v1beta1.Dataset.Builder getDatasetsBuilder(int index) { return getDatasetsFieldBuilder().getBuilder(index); } /** * * * <pre> * A list of Datasets that matches the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Dataset datasets = 1;</code> */ public com.google.cloud.aiplatform.v1beta1.DatasetOrBuilder getDatasetsOrBuilder(int index) { if (datasetsBuilder_ == null) { return datasets_.get(index); } else { return datasetsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * A list of Datasets that matches the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Dataset datasets = 1;</code> */ public java.util.List<? extends com.google.cloud.aiplatform.v1beta1.DatasetOrBuilder> getDatasetsOrBuilderList() { if (datasetsBuilder_ != null) { return datasetsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(datasets_); } } /** * * * <pre> * A list of Datasets that matches the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Dataset datasets = 1;</code> */ public com.google.cloud.aiplatform.v1beta1.Dataset.Builder addDatasetsBuilder() { return getDatasetsFieldBuilder() .addBuilder(com.google.cloud.aiplatform.v1beta1.Dataset.getDefaultInstance()); } /** * * * <pre> * A list of Datasets that matches the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Dataset datasets = 1;</code> */ public com.google.cloud.aiplatform.v1beta1.Dataset.Builder addDatasetsBuilder(int index) { return getDatasetsFieldBuilder() .addBuilder(index, com.google.cloud.aiplatform.v1beta1.Dataset.getDefaultInstance()); } /** * * * <pre> * A list of Datasets that matches the specified filter in the request. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Dataset datasets = 1;</code> */ public java.util.List<com.google.cloud.aiplatform.v1beta1.Dataset.Builder> getDatasetsBuilderList() { return getDatasetsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.Dataset, com.google.cloud.aiplatform.v1beta1.Dataset.Builder, com.google.cloud.aiplatform.v1beta1.DatasetOrBuilder> getDatasetsFieldBuilder() { if (datasetsBuilder_ == null) { datasetsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.Dataset, com.google.cloud.aiplatform.v1beta1.Dataset.Builder, com.google.cloud.aiplatform.v1beta1.DatasetOrBuilder>( datasets_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); datasets_ = null; } return datasetsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * The standard List next-page token. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The standard List next-page token. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The standard List next-page token. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. 
* @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The standard List next-page token. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * The standard List next-page token. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.ListDatasetsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.ListDatasetsResponse) private static final com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse(); } public static com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListDatasetsResponse> PARSER = new com.google.protobuf.AbstractParser<ListDatasetsResponse>() { @java.lang.Override public 
ListDatasetsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListDatasetsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListDatasetsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ListDatasetsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/druid
35,777
processing/src/test/java/org/apache/druid/math/expr/ParserTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.math.expr; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import org.apache.druid.java.util.common.RE; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.segment.column.TypeStrategies; import org.apache.druid.segment.column.TypeStrategiesTest; import org.apache.druid.segment.column.TypeStrategy; import org.apache.druid.testing.InitializedNullHandlingTest; import org.hamcrest.CoreMatchers; import org.hamcrest.MatcherAssert; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import java.math.BigInteger; import java.nio.ByteBuffer; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; /** * */ public class ParserTest extends InitializedNullHandlingTest { @Rule public ExpectedException expectedException = ExpectedException.none(); SettableVectorInputBinding emptyBinding = new SettableVectorInputBinding(8); @BeforeClass public static void setup() { TypeStrategies.registerComplex( TypeStrategiesTest.NULLABLE_TEST_PAIR_TYPE.getComplexTypeName(), new 
TypeStrategiesTest.NullableLongPairTypeStrategy() ); } @Test public void testSimple() { String actual = Parser.parse("1", ExprMacroTable.nil()).toString(); String expected = "1"; Assert.assertEquals(expected, actual); } @Test public void testParseConstants() { validateLiteral("null", null, null); validateLiteral("'hello'", ExpressionType.STRING, "hello"); validateLiteral("'hello \\uD83E\\uDD18'", ExpressionType.STRING, "hello \uD83E\uDD18"); validateLiteral("1", ExpressionType.LONG, BigInteger.valueOf(1L)); validateLiteral(String.valueOf(Long.MAX_VALUE), ExpressionType.LONG, BigInteger.valueOf(Long.MAX_VALUE)); validateLiteral("1.", ExpressionType.DOUBLE, 1.0, false); validateLiteral("1.234", ExpressionType.DOUBLE, 1.234); validateLiteral("1e10", ExpressionType.DOUBLE, 1.0E10, false); validateLiteral("1e-10", ExpressionType.DOUBLE, 1.0E-10, false); validateLiteral("1E10", ExpressionType.DOUBLE, 1.0E10, false); validateLiteral("1E-10", ExpressionType.DOUBLE, 1.0E-10, false); validateLiteral("1.E10", ExpressionType.DOUBLE, 1.0E10, false); validateLiteral("1.E-10", ExpressionType.DOUBLE, 1.0E-10, false); validateLiteral("1.e10", ExpressionType.DOUBLE, 1.0E10, false); validateLiteral("1.e-10", ExpressionType.DOUBLE, 1.0E-10, false); validateLiteral("1.1e10", ExpressionType.DOUBLE, 1.1E10, false); validateLiteral("1.1e-10", ExpressionType.DOUBLE, 1.1E-10, false); validateLiteral("1.1E10", ExpressionType.DOUBLE, 1.1E10); validateLiteral("1.1E-10", ExpressionType.DOUBLE, 1.1E-10); validateLiteral("Infinity", ExpressionType.DOUBLE, Double.POSITIVE_INFINITY); validateLiteral("NaN", ExpressionType.DOUBLE, Double.NaN); } @Test public void testParseOutOfRangeLong() { // Two greater than Long.MAX_VALUE final String s = "9223372036854775809"; // When not flattening, the "out of long range" error happens during eval. 
final Expr expr = Parser.parse(s, ExprMacroTable.nil(), false); final ArithmeticException e = Assert.assertThrows( ArithmeticException.class, () -> expr.eval(InputBindings.nilBindings()) ); MatcherAssert.assertThat(e.getMessage(), CoreMatchers.containsString("BigInteger out of long range")); // When flattening, the "out of long range" error happens during parse, not eval. final ArithmeticException e2 = Assert.assertThrows( ArithmeticException.class, () -> Parser.parse(s, ExprMacroTable.nil(), true) ); MatcherAssert.assertThat(e2.getMessage(), CoreMatchers.containsString("BigInteger out of long range")); } @Test public void testFlattenBinaryOpConstantConstant() { final Expr expr = Parser.parse("(2 + -3)", ExprMacroTable.nil(), true); Assert.assertTrue(expr.isLiteral()); Assert.assertEquals(-1L, expr.getLiteralValue()); } @Test public void testFlattenBinaryOpIdentifierConstant() { final Expr expr = Parser.parse("(s + -3)", ExprMacroTable.nil(), true); Assert.assertFalse(expr.isLiteral()); MatcherAssert.assertThat(expr, CoreMatchers.instanceOf(BinPlusExpr.class)); final Expr right = ((BinPlusExpr) expr).right; Assert.assertTrue(right.isLiteral()); Assert.assertEquals(-3L, right.getLiteralValue()); } @Test public void testSimpleUnaryOps1() { String actual = Parser.parse("-x", ExprMacroTable.nil()).toString(); String expected = "-x"; Assert.assertEquals(expected, actual); actual = Parser.parse("!x", ExprMacroTable.nil()).toString(); expected = "!x"; Assert.assertEquals(expected, actual); } @Test public void testSimpleUnaryOps2() { validateFlatten(String.valueOf(Long.MIN_VALUE), String.valueOf(Long.MIN_VALUE), String.valueOf(Long.MIN_VALUE)); validateFlatten("-1", "-1", "-1"); validateFlatten("--1", "--1", "1"); validateFlatten("-1+2", "(+ -1 2)", "1"); validateFlatten("-1*2", "(* -1 2)", "-2"); validateFlatten("-1^2", "(^ -1 2)", "1"); } @Test public void testSimpleLogicalOps1() { validateParser("x>y", "(> x y)", ImmutableList.of("x", "y")); validateParser("x<y", "(< x 
y)", ImmutableList.of("x", "y")); validateParser("x<=y", "(<= x y)", ImmutableList.of("x", "y")); validateParser("x>=y", "(>= x y)", ImmutableList.of("x", "y")); validateParser("x==y", "(== x y)", ImmutableList.of("x", "y")); validateParser("x!=y", "(!= x y)", ImmutableList.of("x", "y")); validateParser("x && y", "(&& x y)", ImmutableList.of("x", "y")); validateParser("x || y", "(|| x y)", ImmutableList.of("x", "y")); } @Test public void testSimpleAdditivityOp1() { validateParser("x+y", "(+ x y)", ImmutableList.of("x", "y")); validateParser("x-y", "(- x y)", ImmutableList.of("x", "y")); } @Test public void testSimpleAdditivityOp2() { validateParser("x+y+z", "(+ (+ x y) z)", ImmutableList.of("x", "y", "z")); validateParser("x+y-z", "(- (+ x y) z)", ImmutableList.of("x", "y", "z")); validateParser("x-y+z", "(+ (- x y) z)", ImmutableList.of("x", "y", "z")); validateParser("x-y-z", "(- (- x y) z)", ImmutableList.of("x", "y", "z")); validateParser("x-y-x", "(- (- x y) x)", ImmutableList.of("x", "y"), ImmutableSet.of("x", "x_0", "y")); } @Test public void testSimpleMultiplicativeOp1() { validateParser("x*y", "(* x y)", ImmutableList.of("x", "y")); validateParser("x/y", "(/ x y)", ImmutableList.of("x", "y")); validateParser("x%y", "(% x y)", ImmutableList.of("x", "y")); } @Test public void testSimpleMultiplicativeOp2() { validateFlatten("1*2*3", "(* (* 1 2) 3)", "6"); validateFlatten("1*2/3", "(/ (* 1 2) 3)", "0"); validateFlatten("1/2*3", "(* (/ 1 2) 3)", "0"); validateFlatten("1/2/3", "(/ (/ 1 2) 3)", "0"); validateFlatten("1.0*2*3", "(* (* 1.0 2) 3)", "6.0"); validateFlatten("1.0*2/3", "(/ (* 1.0 2) 3)", "0.6666666666666666"); validateFlatten("1.0/2*3", "(* (/ 1.0 2) 3)", "1.5"); validateFlatten("1.0/2/3", "(/ (/ 1.0 2) 3)", "0.16666666666666666"); // partial validateFlatten("1.0*2*x", "(* (* 1.0 2) x)", "(* 2.0 x)"); validateFlatten("1.0*2/x", "(/ (* 1.0 2) x)", "(/ 2.0 x)"); validateFlatten("1.0/2*x", "(* (/ 1.0 2) x)", "(* 0.5 x)"); validateFlatten("1.0/2/x", "(/ (/ 
1.0 2) x)", "(/ 0.5 x)"); // not working yet validateFlatten("1.0*x*3", "(* (* 1.0 x) 3)", "(* (* 1.0 x) 3)"); } @Test public void testSimpleCarrot1() { validateFlatten("1^2", "(^ 1 2)", "1"); } @Test public void testSimpleCarrot2() { validateFlatten("1^2^3", "(^ 1 (^ 2 3))", "1"); } @Test public void testMixed() { validateFlatten("1+2*3", "(+ 1 (* 2 3))", "7"); validateFlatten("1+(2*3)", "(+ 1 (* 2 3))", "7"); validateFlatten("(1+2)*3", "(* (+ 1 2) 3)", "9"); validateFlatten("1*2+3", "(+ (* 1 2) 3)", "5"); validateFlatten("(1*2)+3", "(+ (* 1 2) 3)", "5"); validateFlatten("1*(2+3)", "(* 1 (+ 2 3))", "5"); validateFlatten("1+2^3", "(+ 1 (^ 2 3))", "9"); validateFlatten("1+(2^3)", "(+ 1 (^ 2 3))", "9"); validateFlatten("(1+2)^3", "(^ (+ 1 2) 3)", "27"); validateFlatten("1^2+3", "(+ (^ 1 2) 3)", "4"); validateFlatten("(1^2)+3", "(+ (^ 1 2) 3)", "4"); validateFlatten("1^(2+3)", "(^ 1 (+ 2 3))", "1"); validateFlatten("1^2*3+4", "(+ (* (^ 1 2) 3) 4)", "7"); validateFlatten("-1^2*-3+-4", "(+ (* (^ -1 2) -3) -4)", "-7"); validateFlatten("max(3, 4)", "(max [3, 4])", "4"); validateFlatten("min(1, max(3, 4))", "(min [1, (max [3, 4])])", "1"); } @Test public void testIdentifiers() { validateParser("foo", "foo", ImmutableList.of("foo"), ImmutableSet.of()); validateParser("\"foo\"", "foo", ImmutableList.of("foo"), ImmutableSet.of()); validateParser("\"foo bar\"", "foo bar", ImmutableList.of("foo bar"), ImmutableSet.of()); validateParser("\"foo\\\"bar\"", "foo\"bar", ImmutableList.of("foo\"bar"), ImmutableSet.of()); } @Test public void testLiterals() { validateConstantExpression("\'foo\'", "foo"); validateConstantExpression("\'foo bar\'", "foo bar"); validateConstantExpression("\'föo bar\'", "föo bar"); validateConstantExpression("\'f\\u0040o bar\'", "f@o bar"); validateConstantExpression("\'f\\u000Ao \\'b\\\\\\\"ar\'", "f\no 'b\\\"ar"); } @Test public void testLiteralArraysHomogeneousElements() { validateConstantExpression("[1.0, 2.345]", new Object[]{1.0, 2.345}); 
validateConstantExpression("[1, 3]", new Object[]{1L, 3L}); validateConstantExpression("['hello', 'world']", new Object[]{"hello", "world"}); } @Test public void testLiteralArraysHomogeneousOrNullElements() { validateConstantExpression("[1.0, null, 2.345]", new Object[]{1.0, null, 2.345}); validateConstantExpression("[null, 1, 3]", new Object[]{null, 1L, 3L}); validateConstantExpression("['hello', 'world', null]", new Object[]{"hello", "world", null}); } @Test public void testLiteralArraysEmptyAndAllNullImplicitAreString() { validateConstantExpression("[]", new Object[0]); validateConstantExpression("[null, null, null]", new Object[]{null, null, null}); } @Test public void testLiteralArraysImplicitTypedNumericMixed() { // implicit typed numeric arrays with mixed elements are doubles validateConstantExpression("[1, null, 2000.0]", new Object[]{1.0, null, 2000.0}); validateConstantExpression("[1.0, null, 2000]", new Object[]{1.0, null, 2000.0}); } @Test public void testLiteralArraysExplicitTypedEmpties() { // legacy explicit array format validateConstantExpression("ARRAY<STRING>[]", new Object[0]); validateConstantExpression("ARRAY<DOUBLE>[]", new Object[0]); validateConstantExpression("ARRAY<LONG>[]", new Object[0]); } @Test public void testLiteralArraysExplicitAllNull() { // legacy explicit array format validateConstantExpression("ARRAY<DOUBLE>[null, null, null]", new Object[]{null, null, null}); validateConstantExpression("ARRAY<LONG>[null, null, null]", new Object[]{null, null, null}); validateConstantExpression("ARRAY<STRING>[null, null, null]", new Object[]{null, null, null}); } @Test public void testLiteralArraysExplicitTypes() { // legacy explicit array format validateConstantExpression("ARRAY<DOUBLE>[1.0, null, 2000.0]", new Object[]{1.0, null, 2000.0}); validateConstantExpression("ARRAY<LONG>[3, null, 4]", new Object[]{3L, null, 4L}); validateConstantExpression("ARRAY<STRING>['foo', 'bar', 'baz']", new Object[]{"foo", "bar", "baz"}); } @Test public void 
testLiteralArraysExplicitTypesMixedElements() { // legacy explicit array format // explicit typed numeric arrays mixed numeric types should coerce to the correct explicit type validateConstantExpression("ARRAY<DOUBLE>[3, null, 4, 2.345]", new Object[]{3.0, null, 4.0, 2.345}); validateConstantExpression("ARRAY<LONG>[1.0, null, 2000.0]", new Object[]{1L, null, 2000L}); // explicit typed string arrays should accept any literal and convert to string validateConstantExpression("ARRAY<STRING>['1', null, 2000, 1.1]", new Object[]{"1", null, "2000", "1.1"}); } @Test public void testLiteralExplicitTypedArrays() { validateConstantExpression("ARRAY<DOUBLE>[1.0, 2.0, null, 3.0]", new Object[]{1.0, 2.0, null, 3.0}); validateConstantExpression("ARRAY<LONG>[1, 2, null, 3]", new Object[]{1L, 2L, null, 3L}); validateConstantExpression("ARRAY<STRING>['1', '2', null, '3.0']", new Object[]{"1", "2", null, "3.0"}); // mixed type tests validateConstantExpression("ARRAY<DOUBLE>[3, null, 4, 2.345]", new Object[]{3.0, null, 4.0, 2.345}); validateConstantExpression("ARRAY<LONG>[1.0, null, 2000.0]", new Object[]{1L, null, 2000L}); // explicit typed string arrays should accept any literal and convert validateConstantExpression("ARRAY<STRING>['1', null, 2000, 1.1]", new Object[]{"1", null, "2000", "1.1"}); validateConstantExpression("ARRAY<LONG>['1', null, 2000, 1.1]", new Object[]{1L, null, 2000L, 1L}); validateConstantExpression("ARRAY<DOUBLE>['1', null, 2000, 1.1]", new Object[]{1.0, null, 2000.0, 1.1}); // the gramar isn't cool enough yet to parse populated nested-arrays or complex arrays..., but empty ones can // be defined... validateConstantExpression("ARRAY<COMPLEX<nullableLongPair>>[]", new Object[]{}); validateConstantExpression("ARRAY<ARRAY<LONG>>[]", new Object[]{}); } @Test public void testConstantComplexAndNestedArrays() { // they can be built with array builder functions though... 
validateConstantExpression( "array(['foo', 'bar', 'baz'], ['baz','foo','bar'])", new Object[]{new Object[]{"foo", "bar", "baz"}, new Object[]{"baz", "foo", "bar"}} ); // nested arrays cannot be mixed types, the first element choo-choo-chooses for you validateConstantExpression( "array(['foo', 'bar', 'baz'], ARRAY<LONG>[1,2,3])", new Object[]{new Object[]{"foo", "bar", "baz"}, new Object[]{"1", "2", "3"}} ); // complex types too TypeStrategiesTest.NullableLongPair l1 = new TypeStrategiesTest.NullableLongPair(1L, 2L); TypeStrategiesTest.NullableLongPair l2 = new TypeStrategiesTest.NullableLongPair(2L, 3L); TypeStrategy byteStrategy = TypeStrategiesTest.NULLABLE_TEST_PAIR_TYPE.getStrategy(); final byte[] b1 = new byte[byteStrategy.estimateSizeBytes(l1)]; final byte[] b2 = new byte[byteStrategy.estimateSizeBytes(l2)]; ByteBuffer bb1 = ByteBuffer.wrap(b1); ByteBuffer bb2 = ByteBuffer.wrap(b2); int w1 = byteStrategy.write(bb1, l1, b1.length); int w2 = byteStrategy.write(bb2, l2, b2.length); Assert.assertTrue(w1 > 0); Assert.assertTrue(w2 > 0); String l1String = StringUtils.format( "complex_decode_base64('%s', '%s')", TypeStrategiesTest.NULLABLE_TEST_PAIR_TYPE.getComplexTypeName(), StringUtils.encodeBase64String(b1) ); String l2String = StringUtils.format( "complex_decode_base64('%s', '%s')", TypeStrategiesTest.NULLABLE_TEST_PAIR_TYPE.getComplexTypeName(), StringUtils.encodeBase64String(b2) ); validateConstantExpression( l1String, l1 ); validateConstantExpression( StringUtils.format("array(%s,%s)", l1String, l2String), new Object[]{l1, l2} ); } @Test public void testLiteralArrayImplicitStringParseException() { // implicit typed string array cannot handle literals thate are not null or string expectedException.expect(RE.class); expectedException.expectMessage("Failed to parse array: element 2000 is not a string"); validateConstantExpression("['1', null, 2000, 1.1]", new Object[]{"1", null, "2000", "1.1"}); } @Test public void testLiteralArraysExplicitLongParseException() { 
// explicit typed long arrays only handle numeric types expectedException.expect(RE.class); expectedException.expectMessage("Failed to parse array element '2000' as a long"); validateConstantExpression("<LONG>[1, null, '2000']", new Object[]{1L, null, 2000L}); } @Test public void testLiteralArraysExplicitDoubleParseException() { // explicit typed double arrays only handle numeric types expectedException.expect(RE.class); expectedException.expectMessage("Failed to parse array element '2000.0' as a double"); validateConstantExpression("<DOUBLE>[1.0, null, '2000.0']", new Object[]{1.0, null, 2000.0}); } @Test public void testFunctions() { validateParser("sqrt(x)", "(sqrt [x])", ImmutableList.of("x")); validateParser("if(cond,then,else)", "(if [cond, then, else])", ImmutableList.of("then", "cond", "else")); validateParser("cast(x, 'STRING')", "(cast [x, STRING])", ImmutableList.of("x")); validateParser("cast(x, 'LONG')", "(cast [x, LONG])", ImmutableList.of("x")); validateParser("cast(x, 'DOUBLE')", "(cast [x, DOUBLE])", ImmutableList.of("x")); validateParser( "cast(x, 'STRING_ARRAY')", "(cast [x, STRING_ARRAY])", ImmutableList.of("x"), ImmutableSet.of(), ImmutableSet.of("x") ); validateParser( "cast(x, 'LONG_ARRAY')", "(cast [x, LONG_ARRAY])", ImmutableList.of("x"), ImmutableSet.of(), ImmutableSet.of("x") ); validateParser( "cast(x, 'DOUBLE_ARRAY')", "(cast [x, DOUBLE_ARRAY])", ImmutableList.of("x"), ImmutableSet.of(), ImmutableSet.of("x") ); validateParser( "array_length(x)", "(array_length [x])", ImmutableList.of("x"), ImmutableSet.of(), ImmutableSet.of("x") ); validateParser( "array_concat(x, y)", "(array_concat [x, y])", ImmutableList.of("x", "y"), ImmutableSet.of(), ImmutableSet.of("x", "y") ); validateParser( "array_append(x, y)", "(array_append [x, y])", ImmutableList.of("x", "y"), ImmutableSet.of("y"), ImmutableSet.of("x") ); validateFlatten("sqrt(4)", "(sqrt [4])", "2.0"); validateFlatten("array_concat([1, 2], [3, 4])", "(array_concat [[1, 2], [3, 4]])", "[1, 
2, 3, 4]"); } @Test public void testApplyFunctions() { validateParser( "map((x) -> 1, x)", "(map ([x] -> 1), [x])", ImmutableList.of("x"), ImmutableSet.of(), ImmutableSet.of("x") ); validateParser( "map((x) -> x + 1, x)", "(map ([x] -> (+ x 1)), [x])", ImmutableList.of("x"), ImmutableSet.of(), ImmutableSet.of("x") ); validateParser( "x + map((x) -> x + 1, y)", "(+ x (map ([x] -> (+ x 1)), [y]))", ImmutableList.of("x", "y"), ImmutableSet.of("x"), ImmutableSet.of("y") ); validateParser( "x + map((x) -> x + 1, x)", "(+ x (map ([x] -> (+ x 1)), [x]))", ImmutableList.of("x"), ImmutableSet.of("x"), ImmutableSet.of("x_0") ); validateParser( "map((x) -> concat(x, y), z)", "(map ([x] -> (concat [x, y])), [z])", ImmutableList.of("y", "z"), ImmutableSet.of("y"), ImmutableSet.of("z") ); // 'y' is accumulator, and currently unknown validateParser( "fold((x, acc) -> acc + x, x, y)", "(fold ([x, acc] -> (+ acc x)), [x, y])", ImmutableList.of("x", "y"), ImmutableSet.of(), ImmutableSet.of("x") ); validateParser( "fold((x, acc) -> acc + x, map((x) -> x + 1, x), y)", "(fold ([x, acc] -> (+ acc x)), [(map ([x] -> (+ x 1)), [x]), y])", ImmutableList.of("x", "y"), ImmutableSet.of(), ImmutableSet.of("x") ); validateParser( "array_append(z, fold((x, acc) -> acc + x, map((x) -> x + 1, x), y))", "(array_append [z, (fold ([x, acc] -> (+ acc x)), [(map ([x] -> (+ x 1)), [x]), y])])", ImmutableList.of("x", "y", "z"), ImmutableSet.of(), ImmutableSet.of("x", "z") ); validateParser( "map(z -> z + 1, array_append(z, fold((x, acc) -> acc + x, map((x) -> x + 1, x), y)))", "(map ([z] -> (+ z 1)), [(array_append [z, (fold ([x, acc] -> (+ acc x)), [(map ([x] -> (+ x 1)), [x]), y])])])", ImmutableList.of("x", "y", "z"), ImmutableSet.of(), ImmutableSet.of("x", "z") ); validateParser( "array_append(map(z -> z + 1, array_append(z, fold((x, acc) -> acc + x, map((x) -> x + 1, x), y))), a)", "(array_append [(map ([z] -> (+ z 1)), [(array_append [z, (fold ([x, acc] -> (+ acc x)), [(map ([x] -> (+ x 1)), [x]), 
y])])]), a])", ImmutableList.of("x", "y", "a", "z"), ImmutableSet.of("a"), ImmutableSet.of("x", "z") ); validateFlatten("map((x) -> x + 1, [1, 2, 3, 4])", "(map ([x] -> (+ x 1)), [[1, 2, 3, 4]])", "[2, 3, 4, 5]"); validateFlatten( "map((x) -> x + z, [1, 2, 3, 4])", "(map ([x] -> (+ x z)), [[1, 2, 3, 4]])", "(map ([x] -> (+ x z)), [[1, 2, 3, 4]])" ); } @Test public void testApplyUnapplied() { validateApplyUnapplied("x + 1", "(+ x 1)", "(+ x 1)", ImmutableList.of()); validateApplyUnapplied("x + 1", "(+ x 1)", "(+ x 1)", ImmutableList.of("z")); validateApplyUnapplied("x + y", "(+ x y)", "(map ([x] -> (+ x y)), [x])", ImmutableList.of("x")); validateApplyUnapplied( "x + y", "(+ x y)", "(cartesian_map ([x, y] -> (+ x y)), [x, y])", ImmutableList.of("x", "y") ); validateApplyUnapplied( "map(x -> x + y, x)", "(map ([x] -> (+ x y)), [x])", "(cartesian_map ([x, y] -> (+ x y)), [x, y])", ImmutableList.of("y") ); validateApplyUnapplied( "map(x -> x + 1, x + 1)", "(map ([x] -> (+ x 1)), [(+ x 1)])", "(map ([x] -> (+ x 1)), [(map ([x] -> (+ x 1)), [x])])", ImmutableList.of("x") ); validateApplyUnapplied( "fold((x, acc) -> acc + x + y, x, 0)", "(fold ([x, acc] -> (+ (+ acc x) y)), [x, 0])", "(cartesian_fold ([x, y, acc] -> (+ (+ acc x) y)), [x, y, 0])", ImmutableList.of("y") ); validateApplyUnapplied( "z + fold((x, acc) -> acc + x + y, x, 0)", "(+ z (fold ([x, acc] -> (+ (+ acc x) y)), [x, 0]))", "(+ z (cartesian_fold ([x, y, acc] -> (+ (+ acc x) y)), [x, y, 0]))", ImmutableList.of("y") ); validateApplyUnapplied( "z + fold((x, acc) -> acc + x + y, x, 0)", "(+ z (fold ([x, acc] -> (+ (+ acc x) y)), [x, 0]))", "(map ([z] -> (+ z (cartesian_fold ([x, y, acc] -> (+ (+ acc x) y)), [x, y, 0]))), [z])", ImmutableList.of("y", "z") ); validateApplyUnapplied( "array_to_string(concat(x, 'hello'), ',')", "(array_to_string [(concat [x, hello]), ,])", "(array_to_string [(map ([x] -> (concat [x, hello])), [x]), ,])", ImmutableList.of("x", "y") ); validateApplyUnapplied( "cast(x, 'LONG')", 
"(cast [x, LONG])", "(map ([x] -> (cast [x, LONG])), [x])", ImmutableList.of("x") ); validateApplyUnapplied( "cartesian_map((x,y) -> x + y, x, y)", "(cartesian_map ([x, y] -> (+ x y)), [x, y])", "(cartesian_map ([x, y] -> (+ x y)), [x, y])", ImmutableList.of("y") ); validateApplyUnapplied( "cast(x, 'LONG_ARRAY')", "(cast [x, LONG_ARRAY])", "(cast [x, LONG_ARRAY])", ImmutableList.of("x") ); validateApplyUnapplied( "case_searched((x == 'b'),'b',(x == 'g'),'g','Other')", "(case_searched [(== x b), b, (== x g), g, Other])", "(map ([x] -> (case_searched [(== x b), b, (== x g), g, Other])), [x])", ImmutableList.of("x") ); validateApplyUnapplied( "array_overlap(nvl(x, 'other'), ['a', 'b', 'other'])", "(array_overlap [(nvl [x, other]), [a, b, other]])", "(array_overlap [(map ([x] -> (nvl [x, other])), [x]), [a, b, other]])", ImmutableList.of("x") ); } @Test public void testFoldUnapplied() { validateFoldUnapplied("x + __acc", "(+ x __acc)", "(+ x __acc)", ImmutableList.of(), "__acc"); validateFoldUnapplied("x + __acc", "(+ x __acc)", "(+ x __acc)", ImmutableList.of("z"), "__acc"); validateFoldUnapplied( "x + __acc", "(+ x __acc)", "(fold ([x, __acc] -> (+ x __acc)), [x, __acc])", ImmutableList.of("x"), "__acc" ); validateFoldUnapplied( "x + y + __acc", "(+ (+ x y) __acc)", "(cartesian_fold ([x, y, __acc] -> (+ (+ x y) __acc)), [x, y, __acc])", ImmutableList.of("x", "y"), "__acc" ); validateFoldUnapplied( "__acc + z + fold((x, acc) -> acc + x + y, x, 0)", "(+ (+ __acc z) (fold ([x, acc] -> (+ (+ acc x) y)), [x, 0]))", "(fold ([z, __acc] -> (+ (+ __acc z) (fold ([x, acc] -> (+ (+ acc x) y)), [x, 0]))), [z, __acc])", ImmutableList.of("z"), "__acc" ); validateFoldUnapplied( "__acc + z + fold((x, acc) -> acc + x + y, x, 0)", "(+ (+ __acc z) (fold ([x, acc] -> (+ (+ acc x) y)), [x, 0]))", "(fold ([z, __acc] -> (+ (+ __acc z) (cartesian_fold ([x, y, acc] -> (+ (+ acc x) y)), [x, y, 0]))), [z, __acc])", ImmutableList.of("y", "z"), "__acc" ); validateFoldUnapplied( "__acc + fold((x, 
acc) -> x + y + acc, x, __acc)", "(+ __acc (fold ([x, acc] -> (+ (+ x y) acc)), [x, __acc]))", "(+ __acc (cartesian_fold ([x, y, acc] -> (+ (+ x y) acc)), [x, y, __acc]))", ImmutableList.of("y"), "__acc" ); } @Test public void testUniquify() { validateParser("x-x", "(- x x)", ImmutableList.of("x"), ImmutableSet.of("x", "x_0")); validateParser( "x - x + x", "(+ (- x x) x)", ImmutableList.of("x"), ImmutableSet.of("x", "x_0", "x_1") ); validateParser( "map((x) -> x + x, x)", "(map ([x] -> (+ x x)), [x])", ImmutableList.of("x"), ImmutableSet.of(), ImmutableSet.of("x") ); validateApplyUnapplied( "x + x", "(+ x x)", "(map ([x] -> (+ x x)), [x])", ImmutableList.of("x") ); validateApplyUnapplied( "x + x + x", "(+ (+ x x) x)", "(map ([x] -> (+ (+ x x) x)), [x])", ImmutableList.of("x") ); // heh validateApplyUnapplied( "x + x + x + y + y + y + y + z + z + z", "(+ (+ (+ (+ (+ (+ (+ (+ (+ x x) x) y) y) y) y) z) z) z)", "(cartesian_map ([x, y, z] -> (+ (+ (+ (+ (+ (+ (+ (+ (+ x x) x) y) y) y) y) z) z) z)), [x, y, z])", ImmutableList.of("x", "y", "z") ); } private void validateLiteral(String expr, ExpressionType type, Object expected) { validateLiteral(expr, type, expected, true); } private void validateLiteral(String expr, ExpressionType type, Object expected, boolean roundTrip) { Expr parsed = Parser.parse(expr, ExprMacroTable.nil(), false); Expr parsedFlat = Parser.parse(expr, ExprMacroTable.nil(), true); Assert.assertTrue(parsed.isLiteral()); Assert.assertTrue(parsedFlat.isLiteral()); Assert.assertFalse(parsed.isIdentifier()); Assert.assertEquals(type, parsed.getOutputType(emptyBinding)); Assert.assertEquals(type, parsedFlat.getOutputType(emptyBinding)); Assert.assertEquals(expected, parsed.getLiteralValue()); Assert.assertEquals( // Special case comparison: literal integers start life as BigIntegerExpr; converted to LongExpr later. expected instanceof BigInteger ? 
((BigInteger) expected).longValueExact() : expected, parsedFlat.getLiteralValue() ); if (roundTrip) { Assert.assertEquals(expr, parsed.stringify()); Assert.assertEquals(expr, parsedFlat.stringify()); } } private void validateFlatten(String expression, String withoutFlatten, String withFlatten) { Expr notFlat = Parser.parse(expression, ExprMacroTable.nil(), false); Expr flat = Parser.parse(expression, ExprMacroTable.nil(), true); Assert.assertEquals(expression, withoutFlatten, notFlat.toString()); Assert.assertEquals(expression, withFlatten, flat.toString()); Expr notFlatRoundTrip = Parser.parse(notFlat.stringify(), ExprMacroTable.nil(), false); Expr flatRoundTrip = Parser.parse(flat.stringify(), ExprMacroTable.nil(), true); Assert.assertEquals(expression, withoutFlatten, notFlatRoundTrip.toString()); Assert.assertEquals(expression, withFlatten, flatRoundTrip.toString()); Assert.assertEquals(notFlat.stringify(), notFlatRoundTrip.stringify()); Assert.assertEquals(flat.stringify(), flatRoundTrip.stringify()); } private void validateParser(String expression, String expected, List<String> identifiers) { validateParser(expression, expected, identifiers, ImmutableSet.copyOf(identifiers), Collections.emptySet()); } private void validateParser(String expression, String expected, List<String> identifiers, Set<String> scalars) { validateParser(expression, expected, identifiers, scalars, Collections.emptySet()); } private void validateParser( String expression, String expected, List<String> identifiers, Set<String> scalars, Set<String> arrays ) { final Expr parsed = Parser.parse(expression, ExprMacroTable.nil()); if (parsed instanceof IdentifierExpr) { Assert.assertTrue(parsed.isIdentifier()); } else { Assert.assertFalse(parsed.isIdentifier()); } final Expr.BindingAnalysis deets = parsed.analyzeInputs(); Assert.assertEquals(expression, expected, parsed.toString()); Assert.assertEquals(expression, new HashSet<>(identifiers), deets.getRequiredBindings()); 
Assert.assertEquals(expression, scalars, deets.getScalarVariables()); Assert.assertEquals(expression, arrays, deets.getArrayVariables()); final Expr parsedNoFlatten = Parser.parse(expression, ExprMacroTable.nil(), false); final Expr roundTrip = Parser.parse(parsedNoFlatten.stringify(), ExprMacroTable.nil()); Assert.assertEquals(parsed.stringify(), roundTrip.stringify()); final Expr.BindingAnalysis roundTripDeets = roundTrip.analyzeInputs(); Assert.assertEquals(expression, new HashSet<>(identifiers), roundTripDeets.getRequiredBindings()); Assert.assertEquals(expression, scalars, roundTripDeets.getScalarVariables()); Assert.assertEquals(expression, arrays, roundTripDeets.getArrayVariables()); } private void validateApplyUnapplied( String expression, String unapplied, String applied, List<String> identifiers ) { final Expr parsed = Parser.parse(expression, ExprMacroTable.nil()); Expr.BindingAnalysis deets = parsed.analyzeInputs(); Parser.validateExpr(parsed, deets); final Expr transformed = Parser.applyUnappliedBindings(parsed, deets, identifiers); Assert.assertEquals(expression, unapplied, parsed.toString()); Assert.assertEquals(applied, applied, transformed.toString()); final Expr parsedNoFlatten = Parser.parse(expression, ExprMacroTable.nil(), false); final Expr parsedRoundTrip = Parser.parse(parsedNoFlatten.stringify(), ExprMacroTable.nil()); Expr.BindingAnalysis roundTripDeets = parsedRoundTrip.analyzeInputs(); Parser.validateExpr(parsedRoundTrip, roundTripDeets); final Expr transformedRoundTrip = Parser.applyUnappliedBindings(parsedRoundTrip, roundTripDeets, identifiers); Assert.assertEquals(expression, unapplied, parsedRoundTrip.toString()); Assert.assertEquals(applied, applied, transformedRoundTrip.toString()); Assert.assertEquals(parsed.stringify(), parsedRoundTrip.stringify()); Assert.assertEquals(transformed.stringify(), transformedRoundTrip.stringify()); } private void validateFoldUnapplied( String expression, String unapplied, String applied, List<String> 
identifiers, String accumulator ) { final Expr parsed = Parser.parse(expression, ExprMacroTable.nil()); Expr.BindingAnalysis deets = parsed.analyzeInputs(); Parser.validateExpr(parsed, deets); final Expr transformed = Parser.foldUnappliedBindings(parsed, deets, identifiers, accumulator); Assert.assertEquals(expression, unapplied, parsed.toString()); Assert.assertEquals(applied, applied, transformed.toString()); final Expr parsedNoFlatten = Parser.parse(expression, ExprMacroTable.nil(), false); final Expr parsedRoundTrip = Parser.parse(parsedNoFlatten.stringify(), ExprMacroTable.nil()); Expr.BindingAnalysis roundTripDeets = parsedRoundTrip.analyzeInputs(); Parser.validateExpr(parsedRoundTrip, roundTripDeets); final Expr transformedRoundTrip = Parser.foldUnappliedBindings(parsedRoundTrip, roundTripDeets, identifiers, accumulator); Assert.assertEquals(expression, unapplied, parsedRoundTrip.toString()); Assert.assertEquals(applied, applied, transformedRoundTrip.toString()); Assert.assertEquals(parsed.stringify(), parsedRoundTrip.stringify()); Assert.assertEquals(transformed.stringify(), transformedRoundTrip.stringify()); } private void validateConstantExpression(String expression, Object expected) { Expr parsed = Parser.parse(expression, ExprMacroTable.nil()); Assert.assertEquals( expression, expected, parsed.eval(InputBindings.nilBindings()).value() ); final Expr parsedNoFlatten = Parser.parse(expression, ExprMacroTable.nil(), false); Expr parsedRoundTrip = Parser.parse(parsedNoFlatten.stringify(), ExprMacroTable.nil()); Assert.assertEquals( expression, expected, parsedRoundTrip.eval(InputBindings.nilBindings()).value() ); Assert.assertEquals(parsed.stringify(), parsedRoundTrip.stringify()); } private void validateConstantExpression(String expression, Object[] expected) { Expr parsed = Parser.parse(expression, ExprMacroTable.nil()); Object evaluated = parsed.eval(InputBindings.nilBindings()).value(); Assert.assertArrayEquals( expression, expected, (Object[]) evaluated 
); Assert.assertEquals(expected.getClass(), evaluated.getClass()); final Expr parsedNoFlatten = Parser.parse(expression, ExprMacroTable.nil(), false); Expr roundTrip = Parser.parse(parsedNoFlatten.stringify(), ExprMacroTable.nil()); Assert.assertArrayEquals( expression, expected, (Object[]) roundTrip.eval(InputBindings.nilBindings()).value() ); Assert.assertEquals(parsed.stringify(), roundTrip.stringify()); } }
apache/commons-imaging
36,546
src/main/java/org/apache/commons/imaging/formats/tiff/TiffDirectory.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.imaging.formats.tiff; import java.awt.image.BufferedImage; import java.io.IOException; import java.nio.ByteOrder; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import org.apache.commons.imaging.ImagingException; import org.apache.commons.imaging.common.Allocator; import org.apache.commons.imaging.common.ByteConversions; import org.apache.commons.imaging.common.RationalNumber; import org.apache.commons.imaging.formats.tiff.constants.TiffConstants; import org.apache.commons.imaging.formats.tiff.constants.TiffDirectoryConstants; import org.apache.commons.imaging.formats.tiff.constants.TiffTagConstants; import org.apache.commons.imaging.formats.tiff.fieldtypes.AbstractFieldType; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfo; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoAscii; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoByte; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoBytes; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoDouble; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoDoubles; import 
org.apache.commons.imaging.formats.tiff.taginfos.TagInfoFloat; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoFloats; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoGpsText; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoLong; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoLongs; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoRational; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoRationals; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoSByte; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoSBytes; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoSLong; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoSLongs; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoSRational; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoSRationals; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoSShort; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoSShorts; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoShort; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoShortOrLong; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoShorts; import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoXpString; /** * Provides methods and elements for accessing an Image File Directory (IFD) from a TIFF file. In the TIFF specification, the IFD is the main container for * individual images or sets of metadata. While not all Directories contain images, images are always stored in a Directory. 
*/
public class TiffDirectory extends AbstractTiffElement implements Iterable<TiffField> {

    /** A contiguous block of image data within the TIFF file, identified by its offset and length. */
    public static final class ImageDataElement extends AbstractTiffElement {

        public ImageDataElement(final long offset, final int length) {
            super(offset, length);
        }

        @Override
        public String getElementDescription() {
            return "ImageDataElement";
        }
    }

    /**
     * Gets a human-readable label for the given directory type code.
     *
     * @param type one of the {@code TiffDirectoryConstants.DIRECTORY_TYPE_*} constants
     * @return a short descriptive string, or "Bad Type" for an unrecognized code
     */
    public static String description(final int type) {
        switch (type) {
        case TiffDirectoryConstants.DIRECTORY_TYPE_UNKNOWN:
            return "Unknown";
        case TiffDirectoryConstants.DIRECTORY_TYPE_ROOT:
            return "Root";
        case TiffDirectoryConstants.DIRECTORY_TYPE_SUB:
            return "Sub";
        case TiffDirectoryConstants.DIRECTORY_TYPE_THUMBNAIL:
            return "Thumbnail";
        case TiffDirectoryConstants.DIRECTORY_TYPE_EXIF:
            return "Exif";
        case TiffDirectoryConstants.DIRECTORY_TYPE_GPS:
            return "Gps";
        case TiffDirectoryConstants.DIRECTORY_TYPE_INTEROPERABILITY:
            return "Interoperability";
        default:
            return "Bad Type";
        }
    }

    // The directory's fields; wrapped as an unmodifiable list in the constructor.
    private final List<TiffField> entries;

    /**
     * Preserves the byte order derived from the TIFF file header. Some of the legacy methods in this class require byte order as an argument, though that use
     * could be phased out eventually.
     */
    private final ByteOrder headerByteOrder;

    // JPEG image data associated with this directory, if any; set externally via setJpegImageData().
    private JpegImageData jpegImageData;

    // File offset of the next directory in the IFD chain; see getNextDirectoryOffset().
    private final long nextDirectoryOffset;

    // Strip/tile image data associated with this directory, if any; set externally via setTiffImageData().
    private AbstractTiffImageData abstractTiffImageData;

    // Directory type code; one of the TiffDirectoryConstants.DIRECTORY_TYPE_* constants.
    public final int type;

    /**
     * Constructs a directory.
     *
     * @param type directory type code (see {@code TiffDirectoryConstants})
     * @param entries the fields contained in this directory; stored as an unmodifiable list
     * @param offset byte offset of this directory within the source file
     * @param nextDirectoryOffset byte offset of the next directory in the chain
     * @param byteOrder byte order taken from the TIFF file header
     */
    public TiffDirectory(final int type, final List<TiffField> entries, final long offset, final long nextDirectoryOffset, final ByteOrder byteOrder) {
        // The element length covers the directory header, the fixed-size entries, and the footer.
        super(offset, TiffConstants.DIRECTORY_HEADER_LENGTH + entries.size() * TiffConstants.ENTRY_LENGTH + TiffConstants.DIRECTORY_FOOTER_LENGTH);
        this.type = type;
        this.entries = Collections.unmodifiableList(entries);
        this.nextDirectoryOffset = nextDirectoryOffset;
        this.headerByteOrder = byteOrder;
    }

    /**
     * Gets a human-readable label for this directory's type.
     *
     * @return a short descriptive string
     */
    public String description() {
        return description(type);
    }

    /** Dumps every field in this directory (diagnostic output via {@code TiffField.dump()}). */
    public void dump() {
        entries.forEach(TiffField::dump);
    }

    /**
     * Finds the field matching the given tag, if present.
     *
     * @param tag the tag to search for
     * @return the matching field, or null if not found
     * @throws ImagingException in the event of an invalid or malformed specification
     */
    public TiffField findField(final TagInfo tag) throws ImagingException {
        final boolean failIfMissing = false;
        return findField(tag, failIfMissing);
    }

    /**
     * Finds the field matching the given tag by comparing tag numbers.
     *
     * @param tag the tag to search for
     * @param failIfMissing if true, a missing field raises an exception instead of returning null
     * @return the matching field, or null if absent and failIfMissing is false
     * @throws ImagingException if the field is absent and failIfMissing is true
     */
    public TiffField findField(final TagInfo tag, final boolean failIfMissing) throws ImagingException {
        for (final TiffField field : entries) {
            if (field.getTag() == tag.tag) {
                return field;
            }
        }
        if (failIfMissing) {
            throw new ImagingException("Missing expected field: " + tag.getDescription());
        }
        return null;
    }

    /**
     * Gets the byte order used by the source file for storing this directory and its content.
     *
     * @return A valid byte order instance.
     */
    public ByteOrder getByteOrder() {
        return headerByteOrder;
    }

    /**
     * Gets a mutable copy of this directory's fields.
     *
     * @return a new list containing the directory entries
     */
    public List<TiffField> getDirectoryEntries() {
        return new ArrayList<>(entries);
    }

    @Override
    public String getElementDescription() {
        // Entries start immediately after the directory header; each entry is a fixed size.
        long entryOffset = offset + TiffConstants.DIRECTORY_HEADER_LENGTH;
        final StringBuilder result = new StringBuilder();
        for (final TiffField entry : entries) {
            result.append(String.format("\t[%d]: %s (%d, 0x%x), %s, %d: %s%n", entryOffset, entry.getTagInfo().name, entry.getTag(), entry.getTag(),
                    entry.getFieldType().getName(), entry.getBytesLength(), entry.getValueDescription()));
            entryOffset += TiffConstants.ENTRY_LENGTH;
        }
        return result.toString();
    }

    /**
     * Gets the raw value of the field matching the given tag, if present.
     *
     * @param tag the tag to look up
     * @return the field's value, or null if the field is absent
     * @throws ImagingException in the event of an invalid or malformed specification
     */
    public Object getFieldValue(final TagInfo tag) throws ImagingException {
        final TiffField field = findField(tag);
        if (field == null) {
            return null;
        }
        return field.getValue();
    }

    /**
     * Gets the string value(s) of an ASCII tag.
     *
     * @param tag the tag to look up
     * @param mustExist if true, a missing or mis-typed field raises an exception instead of returning null
     * @return the decoded strings, or null if absent or mis-typed and mustExist is false
     * @throws ImagingException if mustExist is true and the field is missing or has an unexpected type
     */
    public String[] getFieldValue(final TagInfoAscii tag, final boolean mustExist) throws ImagingException {
        final TiffField field = findField(tag);
        if (field == null) {
            if (mustExist) {
                throw new ImagingException("Required field \"" + tag.name + "\" is missing");
            }
            return null;
        }
        if (!tag.dataTypes.contains(field.getFieldType())) {
            if (mustExist) {
                throw new ImagingException("Required field \"" + tag.name + "\" has incorrect type " + field.getFieldType().getName());
            }
            return null;
        }
        final byte[] bytes = field.getByteArrayValue();
        return tag.getValue(field.getByteOrder(), bytes);
    }

    /**
     * Gets the single byte value of a required tag.
     *
     * @throws ImagingException if the field is missing, has an unexpected type, or holds more than one value
     */
    public byte getFieldValue(final TagInfoByte tag) throws ImagingException {
        final TiffField field = findField(tag);
        if (field == null) {
            throw new ImagingException("Required field \"" + tag.name + "\" is missing");
        }
        if (!tag.dataTypes.contains(field.getFieldType())) {
            throw new ImagingException("Required field \"" + tag.name + "\" has incorrect type " + field.getFieldType().getName());
        }
        if (field.getCount() != 1) {
            throw new ImagingException("Field \"" + tag.name + "\" has wrong count " + field.getCount());
        }
        return field.getByteArrayValue()[0];
    }

    /**
     * Gets the byte-array value of a tag; null (or an exception, if mustExist) on a missing or mis-typed field.
     */
    public byte[] getFieldValue(final TagInfoBytes tag, final boolean mustExist) throws ImagingException {
        final TiffField field = findField(tag);
        if (field == null) {
            if (mustExist) {
                throw new ImagingException("Required field \"" + tag.name + "\" is missing");
            }
            return null;
        }
        if (!tag.dataTypes.contains(field.getFieldType())) {
            if (mustExist) {
                throw new ImagingException("Required field \"" + tag.name + "\" has incorrect type " + field.getFieldType().getName());
            }
            return null;
        }
        return field.getByteArrayValue();
    }

    /**
     * Gets the single double value of a required tag.
     *
     * @throws ImagingException if the field is missing, has an unexpected type, or holds more than one value
     */
    public double getFieldValue(final TagInfoDouble tag) throws ImagingException {
        final TiffField field = findField(tag);
        if (field == null) {
            throw new ImagingException("Required field \"" + tag.name + "\" is missing");
        }
        if (!tag.dataTypes.contains(field.getFieldType())) {
            throw new ImagingException("Required field \"" + tag.name + "\" has incorrect type " + field.getFieldType().getName());
        }
        if (field.getCount() != 1) {
            throw new ImagingException("Field \"" + tag.name + "\" has wrong count " + field.getCount());
        }
        final byte[] bytes = field.getByteArrayValue();
        return tag.getValue(field.getByteOrder(), bytes);
    }

    /**
     * Gets the double-array value of a tag; null (or an exception, if mustExist) on a missing or mis-typed field.
     */
    public double[] getFieldValue(final TagInfoDoubles tag, final boolean mustExist) throws ImagingException {
        final TiffField field = findField(tag);
        if (field == null) {
            if (mustExist) {
                throw new ImagingException("Required field \"" + tag.name + "\" is missing");
            }
            return null;
        }
        if (!tag.dataTypes.contains(field.getFieldType())) {
            if (mustExist) {
                throw new ImagingException("Required field \"" + tag.name + "\" has incorrect type " + field.getFieldType().getName());
            }
            return null;
        }
        final byte[] bytes = field.getByteArrayValue();
        return tag.getValue(field.getByteOrder(), bytes);
    }

    /**
     * Gets the single float value of a required tag.
     *
     * @throws ImagingException if the field is missing, has an unexpected type, or holds more than one value
     */
    public float getFieldValue(final TagInfoFloat tag) throws ImagingException {
        final TiffField field = findField(tag);
        if (field == null) {
            throw new ImagingException("Required field \"" + tag.name + "\" is missing");
        }
        if (!tag.dataTypes.contains(field.getFieldType())) {
            throw new ImagingException("Required field \"" + tag.name + "\" has incorrect type " + field.getFieldType().getName());
        }
        if (field.getCount() != 1) {
            throw new ImagingException("Field \"" + tag.name + "\" has wrong count " + field.getCount());
        }
        final byte[] bytes = field.getByteArrayValue();
        return tag.getValue(field.getByteOrder(), bytes);
    }

    /**
     * Gets the float-array value of a tag; null (or an exception, if mustExist) on a missing or mis-typed field.
     */
    public float[] getFieldValue(final TagInfoFloats tag, final boolean mustExist) throws ImagingException {
        final TiffField field = findField(tag);
        if (field == null) {
            if (mustExist) {
                throw new ImagingException("Required field \"" + tag.name + "\" is missing");
            }
            return null;
        }
        if (!tag.dataTypes.contains(field.getFieldType())) {
            if (mustExist) {
                throw new ImagingException("Required field \"" + tag.name + "\" has incorrect type " + field.getFieldType().getName());
            }
            return null;
        }
        final byte[] bytes = field.getByteArrayValue();
        return tag.getValue(field.getByteOrder(), bytes);
    }

    /**
     * Gets the string value of a GPS-text tag; null (or an exception, if mustExist) on a missing field.
     * Note: unlike most overloads, decoding is delegated entirely to the tag (no field-type check here).
     */
    public String getFieldValue(final TagInfoGpsText tag, final boolean mustExist) throws ImagingException {
        final TiffField field = findField(tag);
        if (field == null) {
            if (mustExist) {
                throw new ImagingException("Required field \"" + tag.name + "\" is missing");
            }
            return null;
        }
        return tag.getValue(field);
    }

    /**
     * Gets the single long (32-bit unsigned, returned as int) value of a required tag.
     *
     * @throws ImagingException if the field is missing, has an unexpected type, or holds more than one value
     */
    public int getFieldValue(final TagInfoLong tag) throws ImagingException {
        final TiffField field = findField(tag);
        if (field == null) {
            throw new ImagingException("Required field \"" + tag.name + "\" is missing");
        }
        if (!tag.dataTypes.contains(field.getFieldType())) {
            throw new ImagingException("Required field \"" + tag.name + "\" has incorrect type " + field.getFieldType().getName());
        }
        if (field.getCount() != 1) {
            throw new ImagingException("Field \"" + tag.name + "\" has wrong count " + field.getCount());
        }
        final byte[] bytes = field.getByteArrayValue();
        return tag.getValue(field.getByteOrder(), bytes);
    }

    /**
     * Gets the int-array value of a LONG tag; null (or an exception, if mustExist) on a missing or mis-typed field.
     */
    public int[] getFieldValue(final TagInfoLongs tag, final boolean mustExist) throws ImagingException {
        final TiffField field = findField(tag);
        if (field == null) {
            if (mustExist) {
                throw new ImagingException("Required field \"" + tag.name + "\" is missing");
            }
            return null;
        }
        if (!tag.dataTypes.contains(field.getFieldType())) {
            if (mustExist) {
                throw new ImagingException("Required field \"" + tag.name + "\" has incorrect type " + field.getFieldType().getName());
            }
            return null;
        }
        final byte[] bytes = field.getByteArrayValue();
        return tag.getValue(field.getByteOrder(), bytes);
    }

    /**
     * Gets the single rational value of a required tag.
     *
     * @throws ImagingException if the field is missing, has an unexpected type, or holds more than one value
     */
    public RationalNumber getFieldValue(final TagInfoRational tag) throws ImagingException {
        final TiffField field = findField(tag);
        if (field == null) {
            throw new ImagingException("Required field \"" + tag.name + "\" is missing");
        }
        if (!tag.dataTypes.contains(field.getFieldType())) {
            throw new ImagingException("Required field \"" + tag.name + "\" has incorrect type " + field.getFieldType().getName());
        }
        if (field.getCount() != 1) {
            throw new ImagingException("Field \"" + tag.name + "\" has wrong count " + field.getCount());
        }
        final byte[] bytes = field.getByteArrayValue();
        return tag.getValue(field.getByteOrder(), bytes);
    }

    /**
     * Gets the rational-array value of a tag; null (or an exception, if mustExist) on a missing or mis-typed field.
     */
    public RationalNumber[] getFieldValue(final TagInfoRationals tag, final boolean mustExist) throws ImagingException {
        final TiffField field = findField(tag);
        if (field == null) {
            if (mustExist) {
                throw new ImagingException("Required field \"" + tag.name + "\" is missing");
            }
            return null;
        }
        if (!tag.dataTypes.contains(field.getFieldType())) {
            if (mustExist) {
                throw new ImagingException("Required field \"" + tag.name + "\" has incorrect type " + field.getFieldType().getName());
            }
            return null;
        }
        final byte[] bytes = field.getByteArrayValue();
        return tag.getValue(field.getByteOrder(), bytes);
    }

    /**
     * Gets the single signed-byte value of a required tag.
     *
     * @throws ImagingException if the field is missing, has an unexpected type, or holds more than one value
     */
    public byte getFieldValue(final TagInfoSByte tag) throws ImagingException {
        final TiffField field = findField(tag);
        if (field == null) {
            throw new ImagingException("Required field \"" + tag.name + "\" is missing");
        }
        if (!tag.dataTypes.contains(field.getFieldType())) {
            throw new ImagingException("Required field \"" + tag.name + "\" has incorrect type " + field.getFieldType().getName());
        }
        if (field.getCount() != 1) {
            throw new ImagingException("Field \"" + tag.name + "\" has wrong count " + field.getCount());
        }
        return field.getByteArrayValue()[0];
    }

    /**
     * Gets the signed-byte-array value of a tag; null (or an exception, if mustExist) on a missing or mis-typed field.
     */
    public byte[] getFieldValue(final TagInfoSBytes tag, final boolean mustExist) throws ImagingException {
        final TiffField field = findField(tag);
        if (field == null) {
            if (mustExist) {
                throw new ImagingException("Required field \"" + tag.name + "\" is missing");
            }
            return null;
        }
        if (!tag.dataTypes.contains(field.getFieldType())) {
            if (mustExist) {
                throw new ImagingException("Required field \"" + tag.name + "\" has incorrect type " + field.getFieldType().getName());
            }
            return null;
        }
        return field.getByteArrayValue();
    }

    /**
     * Gets the single short value of a required tag.
     *
     * @throws ImagingException if the field is missing, has an unexpected type, or holds more than one value
     */
    public short getFieldValue(final TagInfoShort tag) throws ImagingException {
        final TiffField field = findField(tag);
        if (field == null) {
            throw new ImagingException("Required field \"" + tag.name + "\" is missing");
        }
        if (!tag.dataTypes.contains(field.getFieldType())) {
            throw new ImagingException("Required field \"" + tag.name + "\" has incorrect type " + field.getFieldType().getName());
        }
        if (field.getCount() != 1) {
            throw new ImagingException("Field \"" + tag.name + "\" has wrong count " + field.getCount());
        }
        final byte[] bytes = field.getByteArrayValue();
        return tag.getValue(field.getByteOrder(), bytes);
    }

    /**
     * Gets the int-array value of a tag that may be stored as either SHORT or LONG; SHORT data is widened
     * as unsigned 16-bit. Null (or an exception, if mustExist) on a missing or mis-typed field.
     */
    public int[] getFieldValue(final TagInfoShortOrLong tag, final boolean mustExist) throws ImagingException {
        final TiffField field = findField(tag);
        if (field == null) {
            if (mustExist) {
                throw new ImagingException("Required field \"" + tag.name + "\" is missing");
            }
            return null;
        }
        if (!tag.dataTypes.contains(field.getFieldType())) {
            if (mustExist) {
                throw new ImagingException("Required field \"" + tag.name + "\" has incorrect type " + field.getFieldType().getName());
            }
            return null;
        }
        final byte[] bytes = field.getByteArrayValue();
        if (field.getFieldType() == AbstractFieldType.SHORT) {
            return ByteConversions.toUInt16s(bytes, field.getByteOrder());
        }
        return ByteConversions.toInts(bytes, field.getByteOrder());
    }

    /**
     * Gets the short-array value of a tag; null (or an exception, if mustExist) on a missing or mis-typed field.
     */
    public short[] getFieldValue(final TagInfoShorts tag, final boolean mustExist) throws ImagingException {
        final TiffField field = findField(tag);
        if (field == null) {
            if (mustExist) {
                throw new ImagingException("Required field \"" + tag.name + "\" is missing");
            }
            return null;
        }
        if (!tag.dataTypes.contains(field.getFieldType())) {
            if (mustExist) {
                throw new ImagingException("Required field \"" + tag.name + "\" has incorrect type " + field.getFieldType().getName());
            }
            return null;
        }
        final byte[] bytes = field.getByteArrayValue();
        return tag.getValue(field.getByteOrder(), bytes);
    }

    /**
     * Gets the single signed-long (32-bit, returned as int) value of a required tag.
     *
     * @throws ImagingException if the field is missing, has an unexpected type, or holds more than one value
     */
    public int getFieldValue(final TagInfoSLong tag) throws ImagingException {
        final TiffField field = findField(tag);
        if (field == null) {
            throw new ImagingException("Required field \"" + tag.name + "\" is missing");
        }
        if (!tag.dataTypes.contains(field.getFieldType())) {
            throw new ImagingException("Required field \"" + tag.name + "\" has incorrect type " + field.getFieldType().getName());
        }
        if (field.getCount() != 1) {
            throw new ImagingException("Field \"" + tag.name + "\" has wrong count " + field.getCount());
        }
        final byte[] bytes = field.getByteArrayValue();
        return tag.getValue(field.getByteOrder(), bytes);
    }

    /**
     * Gets the signed-long-array (int[]) value of a tag; null (or an exception, if mustExist) on a missing or mis-typed field.
     */
    public int[] getFieldValue(final TagInfoSLongs tag, final boolean mustExist) throws ImagingException {
        final TiffField field = findField(tag);
        if (field == null) {
            if (mustExist) {
                throw new ImagingException("Required field \"" + tag.name + "\" is missing");
            }
            return null;
        }
        if (!tag.dataTypes.contains(field.getFieldType())) {
            if (mustExist) {
                throw new ImagingException("Required field \"" + tag.name + "\" has incorrect type " + field.getFieldType().getName());
            }
            return null;
        }
        final byte[] bytes = field.getByteArrayValue();
        return tag.getValue(field.getByteOrder(), bytes);
    }

    /**
     * Gets the single signed-rational value of a required tag.
     *
     * @throws ImagingException if the field is missing, has an unexpected type, or holds more than one value
     */
    public RationalNumber getFieldValue(final TagInfoSRational tag) throws ImagingException {
        final TiffField field = findField(tag);
        if (field == null) {
            throw new ImagingException("Required field \"" + tag.name + "\" is missing");
        }
        if (!tag.dataTypes.contains(field.getFieldType())) {
            throw new ImagingException("Required field \"" + tag.name + "\" has incorrect type " + field.getFieldType().getName());
        }
        if (field.getCount() != 1) {
            throw new ImagingException("Field \"" + tag.name + "\" has wrong count " + field.getCount());
        }
        final byte[] bytes = field.getByteArrayValue();
        return tag.getValue(field.getByteOrder(), bytes);
    }

    /**
     * Gets the signed-rational-array value of a tag; null (or an exception, if mustExist) on a missing or mis-typed field.
     */
    public RationalNumber[] getFieldValue(final TagInfoSRationals tag, final boolean mustExist) throws ImagingException {
        final TiffField field = findField(tag);
        if (field == null) {
            if (mustExist) {
                throw new ImagingException("Required field \"" + tag.name + "\" is missing");
            }
            return null;
        }
        if (!tag.dataTypes.contains(field.getFieldType())) {
            if (mustExist) {
                throw new ImagingException("Required field \"" + tag.name + "\" has incorrect type " + field.getFieldType().getName());
            }
            return null;
        }
        final byte[] bytes = field.getByteArrayValue();
        return tag.getValue(field.getByteOrder(), bytes);
    }

    /**
     * Gets the single signed-short value of a required tag.
     *
     * @throws ImagingException if the field is missing, has an unexpected type, or holds more than one value
     */
    public short getFieldValue(final TagInfoSShort tag) throws ImagingException {
        final TiffField field = findField(tag);
        if (field == null) {
            throw new ImagingException("Required field \"" + tag.name + "\" is missing");
        }
        if (!tag.dataTypes.contains(field.getFieldType())) {
            throw new ImagingException("Required field \"" + tag.name + "\" has incorrect type " + field.getFieldType().getName());
        }
        if (field.getCount() != 1) {
            throw new ImagingException("Field \"" + tag.name + "\" has wrong count " + field.getCount());
        }
        final byte[] bytes = field.getByteArrayValue();
        return tag.getValue(field.getByteOrder(), bytes);
    }

    /**
     * Gets the signed-short-array value of a tag; null (or an exception, if mustExist) on a missing or mis-typed field.
     */
    public short[] getFieldValue(final TagInfoSShorts tag, final boolean mustExist) throws ImagingException {
        final TiffField field = findField(tag);
        if (field == null) {
            if (mustExist) {
                throw new ImagingException("Required field \"" + tag.name + "\" is missing");
            }
            return null;
        }
        if (!tag.dataTypes.contains(field.getFieldType())) {
            if (mustExist) {
                throw new ImagingException("Required field \"" + tag.name + "\" has incorrect type " + field.getFieldType().getName());
            }
            return null;
        }
        final byte[] bytes = field.getByteArrayValue();
        return tag.getValue(field.getByteOrder(), bytes);
    }

    /**
     * Gets the string value of a Windows XP-style tag; null (or an exception, if mustExist) on a missing field.
     * Note: decoding is delegated entirely to the tag (no field-type check here).
     */
    public String getFieldValue(final TagInfoXpString tag, final boolean mustExist) throws ImagingException {
        final TiffField field = findField(tag);
        if (field == null) {
            if (mustExist) {
                throw new ImagingException("Required field \"" + tag.name + "\" is missing");
            }
            return null;
        }
        return tag.getValue(field);
    }

    /**
     * Gets the JPEG image data associated with this directory, if any.
     *
     * @return the JPEG image data, or null if none was set
     */
    public JpegImageData getJpegImageData() {
        return jpegImageData;
    }

    /**
     * Locates the raw JPEG data block via the JPEGInterchangeFormat offset/length tag pair.
     *
     * @return a valid image-data element
     * @throws ImagingException if either of the two tags is absent
     */
    public ImageDataElement getJpegRawImageDataElement() throws ImagingException {
        final TiffField jpegInterchangeFormat = findField(TiffTagConstants.TIFF_TAG_JPEG_INTERCHANGE_FORMAT);
        final TiffField jpegInterchangeFormatLength = findField(TiffTagConstants.TIFF_TAG_JPEG_INTERCHANGE_FORMAT_LENGTH);
        if (jpegInterchangeFormat != null && jpegInterchangeFormatLength != null) {
            final int offSet = jpegInterchangeFormat.getIntArrayValue()[0];
            final int byteCount = jpegInterchangeFormatLength.getIntArrayValue()[0];
            return new ImageDataElement(offSet, byteCount);
        }
        throw new ImagingException("Couldn't find image data.");
    }

    /**
     * Gets the file offset of the next directory in the IFD chain.
     *
     * @return the offset value read from the directory footer
     */
    public long getNextDirectoryOffset() {
        return nextDirectoryOffset;
    }

    /**
     * Reads the numerical data stored in this TIFF directory, if available. Note that this method is defined only for TIFF directories that contain
     * floating-point data or two-byte signed integer data.
     * <p>
     * TIFF directories that provide numerical data do not directly specify images, though it is possible to interpret the data as an image using this library.
     * TIFF files may contain multiple directories which are allowed to have different formats. Thus it is possible for a TIFF file to contain a mix of image
     * and floating-point raster data.
     * <p>
     * If desired, sub-image data can be read from the file by populating the optional parameters object with the coordinates, width, and height of the
     * required subsection of the image before invoking this method.
     *
     * @param params an optional parameter map instance
     * @return a valid instance
     * @throws ImagingException in the event of incompatible or malformed data
     * @throws IOException in the event of an I/O error
     */
    public AbstractTiffRasterData getRasterData(final TiffImagingParameters params) throws ImagingException, IOException {
        final TiffImageParser parser = new TiffImageParser();
        return parser.getRasterData(this, headerByteOrder, params);
    }

    /**
     * Pairs up parallel offset/byte-count fields into a list of image-data elements.
     *
     * @param offsetsField field holding the data offsets
     * @param byteCountsField field holding the matching byte counts
     * @return one element per offset/byte-count pair
     * @throws ImagingException if the two arrays differ in length
     */
    private List<ImageDataElement> getRawImageDataElements(final TiffField offsetsField, final TiffField byteCountsField) throws ImagingException {
        final long[] offsets = offsetsField.getLongArrayValue();
        final int[] byteCounts = byteCountsField.getIntArrayValue();
        if (offsets.length != byteCounts.length) {
            throw new ImagingException("offsets.length(" + offsets.length + ") != byteCounts.length(" + byteCounts.length + ")");
        }
        final List<ImageDataElement> result = Allocator.arrayList(offsets.length);
        for (int i = 0; i < offsets.length; i++) {
            result.add(new ImageDataElement(offsets[i], byteCounts[i]));
        }
        return result;
    }

    /**
     * Gets the value of a required ASCII tag that must hold exactly one string.
     *
     * @throws ImagingException if the field is missing, mis-typed, or holds other than one string
     */
    public String getSingleFieldValue(final TagInfoAscii tag) throws ImagingException {
        final String[] result = getFieldValue(tag, true);
        if (result.length != 1) {
            throw new ImagingException("Field \"" + tag.name + "\" has incorrect length " + result.length);
        }
        return result[0];
    }

    /**
     * Gets the value of a required SHORT-or-LONG tag that must hold exactly one value.
     *
     * @throws ImagingException if the field is missing, mis-typed, or holds other than one value
     */
    public int getSingleFieldValue(final TagInfoShortOrLong tag) throws ImagingException {
        final int[] result = getFieldValue(tag, true);
        if (result.length != 1) {
            throw new ImagingException("Field \"" + tag.name + "\" has incorrect length " + result.length);
        }
        return result[0];
    }

    /**
     * Gets the image associated with the directory, if any. Note that not all directories contain images.
     *
     * @return if successful, a valid BufferedImage instance; null if this directory carries no image data.
     * @throws ImagingException in the event of an invalid or incompatible data format.
     * @throws IOException in the event of an I/O error.
     */
    public BufferedImage getTiffImage() throws ImagingException, IOException {
        if (null == abstractTiffImageData) {
            return null;
        }
        return new TiffImageParser().getBufferedImage(this, headerByteOrder, null);
    }

    /**
     * Gets the image associated with the directory, if any. Note that not all directories contain images.
     * <p>
     * This method comes from an older version of this class in which byte order was required from an external source. Developers are encouraged to use the
     * simpler version of getTiffImage that does not require the byte-order argument.
     *
     * @param byteOrder byte-order obtained from the containing TIFF file
     * @return if successful, a valid BufferedImage instance.
     * @throws ImagingException in the event of an invalid or incompatible data format.
     * @throws IOException in the event of an I/O error.
     */
    public BufferedImage getTiffImage(final ByteOrder byteOrder) throws ImagingException, IOException {
        return getTiffImage(byteOrder, new TiffImagingParameters());
    }

    /**
     * Gets the image associated with the directory, if any. Note that not all directories contain images.
     * <p>
     * This method comes from an older version of this class in which byte order was required from an external source. Developers are encouraged to use the
     * simpler version of getTiffImage that does not require the byte-order argument.
     *
     * @param byteOrder byte-order obtained from the containing TIFF file
     * @param params an object containing optional parameters to be applied to the read operation.
     * @return if successful, a valid BufferedImage instance; null if this directory carries no image data.
     * @throws ImagingException in the event of an invalid or incompatible data format.
     * @throws IOException in the event of an I/O error.
     */
    public BufferedImage getTiffImage(final ByteOrder byteOrder, final TiffImagingParameters params) throws ImagingException, IOException {
        if (null == abstractTiffImageData) {
            return null;
        }
        return new TiffImageParser().getBufferedImage(this, byteOrder, params);
    }

    /**
     * Gets the image associated with the directory, if any. Note that not all directories contain images.
     * <p>
     * The optional parameters object can be used to specify image access or rendering options such as reading only a part of the overall image (i.e. reading a
     * sub-image) or applying a custom photometric interpreter.
     *
     * @param params an object containing optional parameters to be applied to the read operation.
     * @return if successful, a valid BufferedImage instance; null if this directory carries no image data.
     * @throws ImagingException in the event of an invalid or incompatible data format.
     * @throws IOException in the event of an I/O error.
     */
    public BufferedImage getTiffImage(final TiffImagingParameters params) throws ImagingException, IOException {
        if (null == abstractTiffImageData) {
            return null;
        }
        return new TiffImageParser().getBufferedImage(this, headerByteOrder, params);
    }

    /**
     * Gets the strip/tile image data associated with this directory, if any.
     *
     * @return the image data, or null if none was set
     */
    public AbstractTiffImageData getTiffImageData() {
        return abstractTiffImageData;
    }

    /**
     * Locates the raw image-data blocks for this directory, preferring tile layout over strip layout.
     *
     * @return one element per tile or strip
     * @throws ImagingException if neither a complete tile tag pair nor a complete strip tag pair is present
     */
    public List<ImageDataElement> getTiffRawImageDataElements() throws ImagingException {
        final TiffField tileOffsets = findField(TiffTagConstants.TIFF_TAG_TILE_OFFSETS);
        final TiffField tileByteCounts = findField(TiffTagConstants.TIFF_TAG_TILE_BYTE_COUNTS);
        final TiffField stripOffsets = findField(TiffTagConstants.TIFF_TAG_STRIP_OFFSETS);
        final TiffField stripByteCounts = findField(TiffTagConstants.TIFF_TAG_STRIP_BYTE_COUNTS);
        if (tileOffsets != null && tileByteCounts != null) {
            return getRawImageDataElements(tileOffsets, tileByteCounts);
        }
        if (stripOffsets != null && stripByteCounts != null) {
            return getRawImageDataElements(stripOffsets, stripByteCounts);
        }
        throw new ImagingException("Couldn't find image data.");
    }

    /**
     * Indicates whether this directory carries a JPEGInterchangeFormat tag (i.e. embedded JPEG data).
     *
     * @return {@code true} if the tag is present; otherwise, {@code false}
     * @throws ImagingException in the event of an invalid or malformed specification
     */
    public boolean hasJpegImageData() throws ImagingException {
        return null != findField(TiffTagConstants.TIFF_TAG_JPEG_INTERCHANGE_FORMAT);
    }

    /**
     * Indicates whether the directory definition specifies a float-point data format.
     *
     * @return {@code true} if the directory contains floating point data; otherwise, {@code false}
     * @throws ImagingException in the event of an invalid or malformed specification.
     */
    public boolean hasTiffFloatingPointRasterData() throws ImagingException {
        if (!hasTiffImageData()) {
            return false;
        }
        // SampleFormat's first entry determines the interpretation of the raster samples.
        final short[] s = getFieldValue(TiffTagConstants.TIFF_TAG_SAMPLE_FORMAT, false);
        return s != null && s.length > 0 && s[0] == TiffTagConstants.SAMPLE_FORMAT_VALUE_IEEE_FLOATING_POINT;
    }

    /**
     * Indicates whether this directory carries image data, in either tile or strip layout.
     *
     * @return {@code true} if tile or strip offsets are present; otherwise, {@code false}
     * @throws ImagingException in the event of an invalid or malformed specification
     */
    public boolean hasTiffImageData() throws ImagingException {
        if (null != findField(TiffTagConstants.TIFF_TAG_TILE_OFFSETS)) {
            return true;
        }
        return null != findField(TiffTagConstants.TIFF_TAG_STRIP_OFFSETS);
    }

    /**
     * Indicates whether the content associated with the directory is given in a supported numerical-data format. If this method returns {@code true}, the
     * Imaging API will be able to extract a TiffRasterData instance from the associated TIFF file using this directory.
     *
     * @return {@code true} if the directory contains a supported raster data format; otherwise, {@code false}.
     * @throws ImagingException in the event of an invalid or malformed specification.
     */
    public boolean hasTiffRasterData() throws ImagingException {
        if (!hasTiffImageData()) {
            return false;
        }
        // Supported raster formats: IEEE floating point and two's-complement signed integer samples.
        final short[] s = getFieldValue(TiffTagConstants.TIFF_TAG_SAMPLE_FORMAT, false);
        return s != null && s.length > 0
                && (s[0] == TiffTagConstants.SAMPLE_FORMAT_VALUE_IEEE_FLOATING_POINT || s[0] == TiffTagConstants.SAMPLE_FORMAT_VALUE_TWOS_COMPLEMENT_SIGNED_INTEGER);
    }

    /**
     * Indicates whether this directory's image data is stored in strips (as opposed to tiles).
     *
     * @return {@code true} for strip layout, {@code false} for tile layout
     * @throws ImagingException if neither a complete tile tag pair nor a complete strip tag pair is present
     */
    public boolean imageDataInStrips() throws ImagingException {
        final TiffField tileOffsets = findField(TiffTagConstants.TIFF_TAG_TILE_OFFSETS);
        final TiffField tileByteCounts = findField(TiffTagConstants.TIFF_TAG_TILE_BYTE_COUNTS);
        final TiffField stripOffsets = findField(TiffTagConstants.TIFF_TAG_STRIP_OFFSETS);
        final TiffField stripByteCounts = findField(TiffTagConstants.TIFF_TAG_STRIP_BYTE_COUNTS);
        if (tileOffsets != null && tileByteCounts != null) {
            return false;
        }
        if (stripOffsets != null && stripByteCounts != null) {
            return true;
        }
        throw new ImagingException("Couldn't find image data.");
    }

    @Override
    public Iterator<TiffField> iterator() {
        return entries.iterator();
    }

    /** Sets the JPEG image data associated with this directory. */
    public void setJpegImageData(final JpegImageData value) {
        this.jpegImageData = value;
    }

    /** Sets the strip/tile image data associated with this directory. */
    public void setTiffImageData(final AbstractTiffImageData rawImageData) {
        this.abstractTiffImageData = rawImageData;
    }

    /**
     * Gets the number of fields in this directory.
     *
     * @return the entry count
     */
    public int size() {
        return entries.size();
    }
}
apache/flink
37,012
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/extraction/BaseMappingExtractor.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.types.extraction; import org.apache.flink.table.annotation.ArgumentHint; import org.apache.flink.table.annotation.ArgumentTrait; import org.apache.flink.table.annotation.DataTypeHint; import org.apache.flink.table.annotation.StateHint; import org.apache.flink.table.api.DataTypes; import org.apache.flink.table.api.ValidationException; import org.apache.flink.table.catalog.DataTypeFactory; import org.apache.flink.table.data.RowData; import org.apache.flink.table.functions.UserDefinedFunction; import org.apache.flink.table.functions.UserDefinedFunctionHelper; import org.apache.flink.table.procedures.Procedure; import org.apache.flink.table.types.CollectionDataType; import org.apache.flink.table.types.DataType; import org.apache.flink.table.types.extraction.FunctionResultTemplate.FunctionOutputTemplate; import org.apache.flink.table.types.extraction.FunctionResultTemplate.FunctionStateTemplate; import org.apache.flink.table.types.extraction.FunctionResultTemplate.FunctionStateTemplate.StateInfoTemplate; import org.apache.flink.table.types.inference.StaticArgumentTrait; import org.apache.flink.types.Row; import org.apache.commons.lang3.ArrayUtils; import 
javax.annotation.Nullable; import java.lang.reflect.Method; import java.lang.reflect.Parameter; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; import java.util.Arrays; import java.util.EnumSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.Stream; import static org.apache.flink.table.types.extraction.ExtractionUtils.createMethodSignatureString; import static org.apache.flink.table.types.extraction.ExtractionUtils.extractionError; import static org.apache.flink.table.types.extraction.TemplateUtils.findInputOnlyTemplates; import static org.apache.flink.table.types.extraction.TemplateUtils.findResultMappingTemplates; import static org.apache.flink.table.types.extraction.TemplateUtils.findResultOnlyTemplate; import static org.apache.flink.table.types.extraction.TemplateUtils.findResultOnlyTemplates; /** * Base utility for extracting function/procedure mappings from signature to result, e.g. from (INT, * STRING) to BOOLEAN. * * <p>It can not only be used for {@link UserDefinedFunction}, but also for {@link Procedure} which * is almost same to {@link UserDefinedFunction} with regard to extracting the mapping from * signature to result. 
*/ abstract class BaseMappingExtractor { private static final EnumSet<StaticArgumentTrait> SCALAR_TRAITS = EnumSet.of(StaticArgumentTrait.SCALAR); protected final DataTypeFactory typeFactory; protected final String methodName; private final SignatureExtraction signatureExtraction; protected final ResultExtraction outputExtraction; protected final MethodVerification outputVerification; public BaseMappingExtractor( DataTypeFactory typeFactory, String methodName, SignatureExtraction signatureExtraction, ResultExtraction outputExtraction, MethodVerification outputVerification) { this.typeFactory = typeFactory; this.methodName = methodName; this.signatureExtraction = signatureExtraction; this.outputExtraction = outputExtraction; this.outputVerification = outputVerification; } Map<FunctionSignatureTemplate, FunctionOutputTemplate> extractOutputMapping() { try { return extractResultMappings( outputExtraction, FunctionTemplate::getOutputTemplate, outputVerification); } catch (Throwable t) { throw extractionError(t, "Error in extracting a signature to output mapping."); } } // -------------------------------------------------------------------------------------------- // Extraction strategies // -------------------------------------------------------------------------------------------- /** * Extraction that uses the method parameters for producing a {@link FunctionSignatureTemplate}. * * @param offset excludes the first n method parameters as arguments. This is necessary for * aggregating functions which don't require a {@link StateHint}. Accumulators are mandatory * and are kind of an implicit {@link StateHint}. 
*/
static SignatureExtraction createArgumentsFromParametersExtraction(
        int offset, @Nullable Class<?> contextClass) {
    return (extractor, method) -> {
        // Collect the parameters that qualify as function arguments (skipping offset,
        // context parameter, and state parameters), then derive all signature parts.
        final List<ArgumentParameter> args =
                extractArgumentParameters(method, offset, contextClass);
        final EnumSet<StaticArgumentTrait>[] argumentTraits = extractArgumentTraits(args);
        final List<FunctionArgumentTemplate> argumentTemplates =
                extractArgumentTemplates(
                        extractor.typeFactory, extractor.getFunctionClass(), args);
        final String[] argumentNames = extractArgumentNames(method, args);
        final boolean[] argumentOptionals = extractArgumentOptionals(args);
        return FunctionSignatureTemplate.of(
                argumentTemplates,
                method.isVarArgs(),
                argumentTraits,
                argumentNames,
                argumentOptionals);
    };
}

/**
 * Extraction that uses the method parameters for producing a {@link FunctionSignatureTemplate}.
 *
 * @param offset excludes the first n method parameters as arguments. This is necessary for
 *     aggregating functions which don't require a {@link StateHint}. Accumulators are mandatory
 *     and are kind of an implicit {@link StateHint}.
 */
static SignatureExtraction createArgumentsFromParametersExtraction(int offset) {
    // Convenience overload without a context class to filter out.
    return createArgumentsFromParametersExtraction(offset, null);
}

/** Extraction that uses the method parameters with {@link StateHint} for state entries. */
static ResultExtraction createStateFromParametersExtraction() {
    return (extractor, method) -> {
        final List<StateParameter> stateParameters = extractStateParameters(method);
        return createStateTemplateFromParameters(extractor, method, stateParameters);
    };
}

/**
 * Extraction that uses a generic type variable for producing a {@link FunctionStateTemplate}.
 * Or method parameters with {@link StateHint} for state entries as a fallback.
 */
static ResultExtraction createStateFromGenericInClassOrParametersExtraction(
        Class<? extends UserDefinedFunction> baseClass, int genericPos) {
    return (extractor, method) -> {
        final List<StateParameter> stateParameters = extractStateParameters(method);
        if (stateParameters.isEmpty()) {
            // No @StateHint parameters present: derive a single default accumulator state
            // entry from the generic type variable of the given base class.
            final DataType dataType =
                    DataTypeExtractor.extractFromGeneric(
                            extractor.typeFactory,
                            baseClass,
                            genericPos,
                            extractor.getFunctionClass());
            final LinkedHashMap<String, StateInfoTemplate> state = new LinkedHashMap<>();
            state.put(
                    UserDefinedFunctionHelper.DEFAULT_ACCUMULATOR_NAME,
                    StateInfoTemplate.of(dataType, null));
            return FunctionResultTemplate.ofState(state);
        }
        return createStateTemplateFromParameters(extractor, method, stateParameters);
    };
}

// --------------------------------------------------------------------------------------------
// Methods for subclasses
// --------------------------------------------------------------------------------------------

protected abstract Set<FunctionTemplate> extractGlobalFunctionTemplates();

protected abstract Set<FunctionTemplate> extractLocalFunctionTemplates(Method method);

protected abstract List<Method> collectMethods(String methodName);

protected abstract Class<?> getFunctionClass();

protected abstract String getHintType();

/** Assembles the expected method parameter classes: state entries (if any) come first. */
protected static Class<?>[] assembleParameters(
        @Nullable FunctionStateTemplate state, FunctionSignatureTemplate arguments) {
    return Stream.concat(
                    Optional.ofNullable(state)
                            .map(FunctionStateTemplate::toClassList)
                            .orElse(List.of())
                            .stream(),
                    arguments.toClassList().stream())
            .toArray(Class[]::new);
}

/** Creates the error shown when no implementation method matches the expected signature. */
protected static ValidationException createMethodNotFoundError(
        String methodName, Class<?>[] parameters, @Nullable Class<?> returnType, String pattern) {
    return extractionError(
            "Considering all hints, the method should comply with the signature:\n%s%s",
            createMethodSignatureString(methodName, parameters, returnType),
            pattern.isEmpty() ? "" : "\nPattern: " + pattern);
}

/**
 * Extracts mappings from signature to result (either state or output) for the entire function.
 * Verifies if the extracted inference matches with the implementation.
 *
 * <p>For example, from {@code (INT, BOOLEAN, ANY) -> INT}. It does this by going through all
 * implementation methods and collecting all "per-method" mappings. The function mapping is the
 * union of all "per-method" mappings.
 */
@SuppressWarnings("unchecked")
protected <T extends FunctionResultTemplate>
        Map<FunctionSignatureTemplate, T> extractResultMappings(
                ResultExtraction resultExtraction,
                Function<FunctionTemplate, FunctionResultTemplate> accessor,
                @Nullable MethodVerification verification) {
    final Set<FunctionTemplate> global = extractGlobalFunctionTemplates();
    final Set<FunctionResultTemplate> globalResultOnly =
            findResultOnlyTemplates(global, accessor);

    // for each method find a signature that maps to results
    final Map<FunctionSignatureTemplate, FunctionResultTemplate> collectedMappings =
            new LinkedHashMap<>();
    final List<Method> methods = collectMethods(methodName);
    if (methods.isEmpty()) {
        throw extractionError(
                "Could not find a publicly accessible method named '%s'.", methodName);
    }
    for (Method method : methods) {
        try {
            // Scala var-args may generate a Seq-based sibling; prefer the Java-like variant.
            final Method correctMethod = correctVarArgMethod(method);

            final Map<FunctionSignatureTemplate, FunctionResultTemplate>
                    collectedMappingsPerMethod =
                            collectMethodMappings(
                                    correctMethod,
                                    global,
                                    globalResultOnly,
                                    resultExtraction,
                                    accessor);

            // check if the method can be called
            verifyMappingForMethod(correctMethod, collectedMappingsPerMethod, verification);

            // check if method strategies conflict with function strategies
            collectedMappingsPerMethod.forEach(
                    (signature, result) -> putMapping(collectedMappings, signature, result));
        } catch (Throwable t) {
            throw extractionError(
                    t, "Unable to extract a type inference from method:\n%s", method.toString());
        }
    }
    return (Map<FunctionSignatureTemplate, T>) collectedMappings;
}

/** Guard for function kinds that must not declare any state entries. */
protected static void checkNoState(@Nullable FunctionStateTemplate state) {
    if (state != null) {
        throw extractionError("State is not supported for this kind of function.");
    }
}

/** Guard for aggregating functions: exactly one state entry (the accumulator) is required. */
protected static void checkSingleState(@Nullable FunctionStateTemplate state) {
    if (state == null || state.toClassList().size() != 1) {
        throw extractionError(
                "Aggregating functions need exactly one state entry for the accumulator.");
    }
}

/** Guard that every argument declares only the scalar trait. */
protected static void checkScalarArgumentsOnly(FunctionSignatureTemplate arguments) {
    final EnumSet<StaticArgumentTrait>[] argumentTraits = arguments.argumentTraits;
    IntStream.range(0, argumentTraits.length)
            .forEach(
                    pos -> {
                        if (!argumentTraits[pos].equals(SCALAR_TRAITS)) {
                            throw extractionError(
                                    "Only scalar arguments are supported at this location. "
                                            + "But argument '%s' declared the following traits: %s",
                                    arguments.argumentNames[pos],
                                    argumentTraits[pos]);
                        }
                    });
}

// --------------------------------------------------------------------------------------------
// Helper methods
// --------------------------------------------------------------------------------------------

/** Builds a state template from all {@link StateHint}-annotated method parameters. */
private static FunctionStateTemplate createStateTemplateFromParameters(
        BaseMappingExtractor extractor, Method method, List<StateParameter> stateParameters) {
    final String[] argumentNames = extractStateNames(method, stateParameters);
    if (argumentNames == null) {
        throw extractionError("Unable to extract names for all state entries.");
    }
    final List<DataType> dataTypes =
            stateParameters.stream()
                    .map(
                            s ->
                                    DataTypeExtractor.extractFromMethodParameter(
                                            extractor.typeFactory,
                                            extractor.getFunctionClass(),
                                            s.method,
                                            s.pos))
                    .collect(Collectors.toList());
    // Preserve declaration order; on duplicate names the first entry wins ((o, n) -> o).
    final LinkedHashMap<String, StateInfoTemplate> state =
            IntStream.range(0, argumentNames.length)
                    .mapToObj(
                            i -> {
                                final DataType dataType = dataTypes.get(i);
                                final StateHint hint =
                                        stateParameters
                                                .get(i)
                                                .parameter
                                                .getAnnotation(StateHint.class);
                                return Map.entry(
                                        argumentNames[i], StateInfoTemplate.of(dataType, hint));
                            })
                    .collect(
                            Collectors.toMap(
                                    Map.Entry::getKey,
                                    Map.Entry::getValue,
                                    (o, n) -> o,
                                    LinkedHashMap::new));
    return FunctionResultTemplate.ofState(state);
}

/**
 * Extracts
 * mappings from signature to result (either accumulator or output) for the given
 * method. It considers both global hints for the entire function and local hints just for this
 * method.
 *
 * <p>The algorithm aims to find an input signature for every declared result. If no result is
 * declared, it will be extracted. If no input signature is declared, it will be extracted.
 */
private Map<FunctionSignatureTemplate, FunctionResultTemplate> collectMethodMappings(
        Method method,
        Set<FunctionTemplate> global,
        Set<FunctionResultTemplate> globalResultOnly,
        ResultExtraction resultExtraction,
        Function<FunctionTemplate, FunctionResultTemplate> accessor) {
    final Map<FunctionSignatureTemplate, FunctionResultTemplate> collectedMappingsPerMethod =
            new LinkedHashMap<>();
    final Set<FunctionTemplate> local = extractLocalFunctionTemplates(method);

    final Set<FunctionResultTemplate> localResultOnly =
            findResultOnlyTemplates(local, accessor);

    final Set<FunctionTemplate> explicitMappings =
            findResultMappingTemplates(global, local, accessor);

    final FunctionResultTemplate resultOnly =
            findResultOnlyTemplate(
                    globalResultOnly,
                    localResultOnly,
                    explicitMappings,
                    accessor,
                    getHintType());

    final Set<FunctionSignatureTemplate> inputOnly =
            findInputOnlyTemplates(global, local, accessor);

    // add all explicit mappings because they contain complete signatures
    putExplicitMappings(collectedMappingsPerMethod, explicitMappings, inputOnly, accessor);
    // add result only template with explicit or extracted signatures
    putUniqueResultMappings(collectedMappingsPerMethod, resultOnly, inputOnly, method);
    // handle missing result by extraction with explicit or extracted signatures
    putExtractedResultMappings(collectedMappingsPerMethod, inputOnly, resultExtraction, method);

    return collectedMappingsPerMethod;
}

/**
 * Special case for Scala which generates two methods when using var-args (a {@code Seq < String
 * >} and {@code String...}). This method searches for the Java-like variant.
 */
private static Method correctVarArgMethod(Method method) {
    final int paramCount = method.getParameterCount();
    final Class<?>[] paramClasses = method.getParameterTypes();
    if (paramCount > 0
            && paramClasses[paramCount - 1].getName().equals("scala.collection.Seq")) {
        final Type[] paramTypes = method.getGenericParameterTypes();
        final ParameterizedType seqType = (ParameterizedType) paramTypes[paramCount - 1];
        final Type varArgType = seqType.getActualTypeArguments()[0];
        // Search the declaring class for a var-args sibling with identical leading parameters
        // and a matching array component type for the last parameter.
        return ExtractionUtils.collectMethods(method.getDeclaringClass(), method.getName())
                .stream()
                .filter(Method::isVarArgs)
                .filter(candidate -> candidate.getParameterCount() == paramCount)
                .filter(
                        candidate -> {
                            final Type[] candidateParamTypes =
                                    candidate.getGenericParameterTypes();
                            for (int i = 0; i < paramCount - 1; i++) {
                                if (candidateParamTypes[i] != paramTypes[i]) {
                                    return false;
                                }
                            }
                            final Class<?> candidateVarArgType =
                                    candidate.getParameterTypes()[paramCount - 1];
                            return candidateVarArgType.isArray()
                                    // check for Object is needed in case of Scala primitives
                                    // (e.g. Int)
                                    && (varArgType == Object.class
                                            || candidateVarArgType.getComponentType()
                                                    == varArgType);
                        })
                .findAny()
                .orElse(method);
    }
    return method;
}

/** Explicit mappings with complete signature to result declaration. */
private void putExplicitMappings(
        Map<FunctionSignatureTemplate, FunctionResultTemplate> collectedMappings,
        Set<FunctionTemplate> explicitMappings,
        Set<FunctionSignatureTemplate> signatureOnly,
        Function<FunctionTemplate, FunctionResultTemplate> accessor) {
    explicitMappings.forEach(
            t -> {
                // signature templates are valid everywhere and are added to the explicit
                // mapping
                Stream.concat(signatureOnly.stream(), Stream.of(t.getSignatureTemplate()))
                        .forEach(v -> putMapping(collectedMappings, v, accessor.apply(t)));
            });
}

/**
 * Result only template with explicit or extracted signatures.
 */
private void putUniqueResultMappings(
        Map<FunctionSignatureTemplate, FunctionResultTemplate> collectedMappings,
        @Nullable FunctionResultTemplate uniqueResult,
        Set<FunctionSignatureTemplate> signatureOnly,
        Method method) {
    if (uniqueResult == null) {
        return;
    }
    // input only templates are valid everywhere if they don't exist fallback to extraction
    if (!signatureOnly.isEmpty()) {
        signatureOnly.forEach(s -> putMapping(collectedMappings, s, uniqueResult));
    } else {
        putMapping(collectedMappings, signatureExtraction.extract(this, method), uniqueResult);
    }
}

/** Missing result by extraction with explicit or extracted signatures. */
private void putExtractedResultMappings(
        Map<FunctionSignatureTemplate, FunctionResultTemplate> collectedMappings,
        Set<FunctionSignatureTemplate> inputOnly,
        ResultExtraction resultExtraction,
        Method method) {
    // Only applies when the previous steps produced no mapping at all.
    if (!collectedMappings.isEmpty()) {
        return;
    }
    final FunctionResultTemplate result = resultExtraction.extract(this, method);
    // input only validators are valid everywhere if they don't exist fallback to extraction
    if (!inputOnly.isEmpty()) {
        inputOnly.forEach(signature -> putMapping(collectedMappings, signature, result));
    } else {
        final FunctionSignatureTemplate signature = signatureExtraction.extract(this, method);
        putMapping(collectedMappings, signature, result);
    }
}

/** Inserts a mapping, rejecting two different results for the same input signature. */
private void putMapping(
        Map<FunctionSignatureTemplate, FunctionResultTemplate> collectedMappings,
        FunctionSignatureTemplate signature,
        FunctionResultTemplate result) {
    final FunctionResultTemplate existingResult = collectedMappings.get(signature);
    if (existingResult == null) {
        collectedMappings.put(signature, result);
    }
    // template must not conflict with same input
    else if (!existingResult.equals(result)) {
        throw extractionError(
                String.format(
                        "%s hints with same input definition but different result types are not allowed.",
                        getHintType()));
    }
}

/**
 * Checks if the given method can be called and returns what hints declare.
 */
private void verifyMappingForMethod(
        Method method,
        Map<FunctionSignatureTemplate, FunctionResultTemplate> collectedMappingsPerMethod,
        @Nullable MethodVerification verification) {
    if (verification == null) {
        return;
    }
    collectedMappingsPerMethod.forEach(
            (signature, result) -> {
                // Dispatch on the concrete result template kind: state vs. output.
                if (result instanceof FunctionStateTemplate) {
                    final FunctionStateTemplate stateTemplate = (FunctionStateTemplate) result;
                    verification.verify(method, stateTemplate, signature, null);
                } else if (result instanceof FunctionOutputTemplate) {
                    final FunctionOutputTemplate outputTemplate =
                            (FunctionOutputTemplate) result;
                    verification.verify(method, null, signature, outputTemplate);
                }
            });
}

// --------------------------------------------------------------------------------------------
// Parameters extraction (i.e. state and arguments)
// --------------------------------------------------------------------------------------------

/** Method parameter that qualifies as a function argument (i.e. not a context or state). */
private static class ArgumentParameter {

    final Parameter parameter;

    final Method method;

    // Pos in the method, not necessarily in the extracted function
    final int pos;

    private ArgumentParameter(Parameter parameter, Method method, int pos) {
        this.parameter = parameter;
        this.method = method;
        this.pos = pos;
    }
}

/**
 * Method parameter that qualifies as a function state (i.e. not a context or argument).
*/
private static class StateParameter {

    final Parameter parameter;

    final Method method;

    // Pos in the method, not necessarily in the extracted function
    final int pos;

    private StateParameter(Parameter parameter, Method method, int pos) {
        this.parameter = parameter;
        this.method = method;
        this.pos = pos;
    }
}

/**
 * Collects the parameters that act as function arguments: skips the first {@code offset}
 * parameters, an optional context parameter, and every {@link StateHint} parameter.
 */
private static List<ArgumentParameter> extractArgumentParameters(
        Method method, int offset, @Nullable Class<?> contextClass) {
    final Parameter[] parameters = method.getParameters();
    return IntStream.range(0, parameters.length)
            .mapToObj(
                    pos -> {
                        final Parameter parameter = parameters[pos];
                        return new ArgumentParameter(parameter, method, pos);
                    })
            .skip(offset)
            .filter(arg -> contextClass == null || arg.parameter.getType() != contextClass)
            .filter(arg -> arg.parameter.getAnnotation(StateHint.class) == null)
            .collect(Collectors.toList());
}

/** Collects all parameters annotated with {@link StateHint}. */
private static List<StateParameter> extractStateParameters(Method method) {
    final Parameter[] parameters = method.getParameters();
    return IntStream.range(0, parameters.length)
            .mapToObj(
                    pos -> {
                        final Parameter parameter = parameters[pos];
                        return new StateParameter(parameter, method, pos);
                    })
            .filter(arg -> arg.parameter.getAnnotation(StateHint.class) != null)
            .collect(Collectors.toList());
}

/** Derives an argument template for each argument parameter. */
private static List<FunctionArgumentTemplate> extractArgumentTemplates(
        DataTypeFactory typeFactory, Class<?> extractedClass, List<ArgumentParameter> args) {
    return args.stream()
            .map(
                    arg ->
                            // check for input group before start extracting a data type
                            tryExtractInputGroupArgument(arg)
                                    .orElseGet(
                                            () ->
                                                    extractArgumentByKind(
                                                            typeFactory, extractedClass, arg)))
            .collect(Collectors.toList());
}

/** Returns an input-group argument template if the parameter's hint declares one. */
private static Optional<FunctionArgumentTemplate> tryExtractInputGroupArgument(
        ArgumentParameter arg) {
    final DataTypeHint dataTypehint = arg.parameter.getAnnotation(DataTypeHint.class);
    final ArgumentHint argumentHint = arg.parameter.getAnnotation(ArgumentHint.class);
    // The two hint kinds are mutually exclusive on a single parameter.
    if (dataTypehint != null && argumentHint != null) {
        throw extractionError(
                "Argument and data type hints cannot be declared at the same time at position %d.",
                arg.pos);
    }
    if (argumentHint != null) {
        final DataTypeTemplate template = DataTypeTemplate.fromAnnotation(argumentHint);
        if (template.inputGroup != null) {
            return Optional.of(FunctionArgumentTemplate.ofInputGroup(template.inputGroup));
        }
    } else if (dataTypehint != null) {
        final DataTypeTemplate template = DataTypeTemplate.fromAnnotation(dataTypehint, null);
        if (template.inputGroup != null) {
            return Optional.of(FunctionArgumentTemplate.ofInputGroup(template.inputGroup));
        }
    }
    return Optional.empty();
}

/** Extracts an argument template according to the single root trait (scalar or table). */
private static FunctionArgumentTemplate extractArgumentByKind(
        DataTypeFactory typeFactory, Class<?> extractedClass, ArgumentParameter arg) {
    final Parameter parameter = arg.parameter;
    final ArgumentHint argumentHint = parameter.getAnnotation(ArgumentHint.class);
    final int pos = arg.pos;
    // Without a hint, the argument defaults to SCALAR.
    final Set<ArgumentTrait> rootTrait =
            Optional.ofNullable(argumentHint)
                    .map(
                            hint ->
                                    Arrays.stream(hint.value())
                                            .filter(ArgumentTrait::isRoot)
                                            .collect(Collectors.toSet()))
                    .orElse(Set.of(ArgumentTrait.SCALAR));
    if (rootTrait.size() != 1) {
        throw extractionError(
                "Incorrect argument kind at position %d. Argument kind must be one of: %s",
                pos,
                Arrays.stream(ArgumentTrait.values())
                        .filter(ArgumentTrait::isRoot)
                        .collect(Collectors.toList()));
    }
    if (rootTrait.contains(ArgumentTrait.SCALAR)) {
        return extractScalarArgument(typeFactory, extractedClass, arg);
    } else if (rootTrait.contains(ArgumentTrait.ROW_SEMANTIC_TABLE)
            || rootTrait.contains(ArgumentTrait.SET_SEMANTIC_TABLE)) {
        return extractTableArgument(typeFactory, argumentHint, extractedClass, arg);
    } else {
        throw extractionError("Unknown argument kind.");
    }
}

private static FunctionArgumentTemplate extractTableArgument(
        DataTypeFactory typeFactory,
        ArgumentHint argumentHint,
        Class<?> extractedClass,
        ArgumentParameter arg) {
    try {
        final DataType type =
                DataTypeExtractor.extractFromMethodParameter(
                        typeFactory, extractedClass, arg.method, arg.pos);
        return FunctionArgumentTemplate.ofDataType(type);
    } catch (Throwable t) {
        // Fall back to an untyped table argument for Row/RowData parameters.
        final Class<?> paramClass = arg.parameter.getType();
        final Class<?> argClass = argumentHint.type().bridgedTo();
        if (argClass == Row.class || argClass == RowData.class) {
            return FunctionArgumentTemplate.ofTable(argClass);
        }
        if (paramClass == Row.class || paramClass == RowData.class) {
            return FunctionArgumentTemplate.ofTable(paramClass);
        }
        // Just a regular error for a typed argument
        throw t;
    }
}

private static FunctionArgumentTemplate extractScalarArgument(
        DataTypeFactory typeFactory, Class<?> extractedClass, ArgumentParameter arg) {
    final DataType type =
            DataTypeExtractor.extractFromMethodParameter(
                    typeFactory, extractedClass, arg.method, arg.pos);
    // unwrap data type in case of varargs
    if (arg.parameter.isVarArgs()) {
        // for ARRAY
        if (type instanceof CollectionDataType) {
            return FunctionArgumentTemplate.ofDataType(
                    ((CollectionDataType) type).getElementDataType());
        }
        // special case for varargs that have been misinterpreted as BYTES
        else if (type.equals(DataTypes.BYTES())) {
            return FunctionArgumentTemplate.ofDataType(
                    DataTypes.TINYINT().notNull().bridgedTo(byte.class));
        }
    }
    return FunctionArgumentTemplate.ofDataType(type);
}

/** Extracts the static trait sets for all arguments; defaults to scalar without a hint. */
@SuppressWarnings("unchecked")
private static EnumSet<StaticArgumentTrait>[] extractArgumentTraits(
        List<ArgumentParameter> args) {
    return args.stream()
            .map(
                    arg -> {
                        final ArgumentHint argumentHint =
                                arg.parameter.getAnnotation(ArgumentHint.class);
                        if (argumentHint == null) {
                            return SCALAR_TRAITS;
                        }
                        final List<StaticArgumentTrait> traits =
                                Arrays.stream(argumentHint.value())
                                        .map(ArgumentTrait::toStaticTrait)
                                        .collect(Collectors.toList());
                        return EnumSet.copyOf(traits);
                    })
            .toArray(EnumSet[]::new);
}

/** Returns the argument names, or {@code null} if parameter names are unavailable. */
private static @Nullable String[] extractArgumentNames(
        Method method, List<ArgumentParameter> args) {
    final List<String> methodParameterNames =
            ExtractionUtils.extractMethodParameterNames(method);
    if (methodParameterNames != null) {
        return args.stream()
                .map(arg -> methodParameterNames.get(arg.pos))
                .toArray(String[]::new);
    } else {
        return null;
    }
}

/** Returns the state entry names, or {@code null} if parameter names are unavailable. */
private static @Nullable String[] extractStateNames(Method method, List<StateParameter> state) {
    final List<String> methodParameterNames =
            ExtractionUtils.extractMethodParameterNames(method);
    if (methodParameterNames != null) {
        return state.stream()
                .map(arg -> methodParameterNames.get(arg.pos))
                .toArray(String[]::new);
    } else {
        return null;
    }
}

/** Returns per-argument optionality flags taken from {@link ArgumentHint#isOptional()}. */
private static boolean[] extractArgumentOptionals(List<ArgumentParameter> args) {
    final Boolean[] argumentOptionals =
            args.stream()
                    .map(arg -> arg.parameter.getAnnotation(ArgumentHint.class))
                    .map(
                            hint -> {
                                if (hint == null) {
                                    return false;
                                }
                                return hint.isOptional();
                            })
                    .toArray(Boolean[]::new);
    return ArrayUtils.toPrimitive(argumentOptionals);
}

// --------------------------------------------------------------------------------------------
// Helper interfaces
// --------------------------------------------------------------------------------------------

/**
 * Extracts a {@link FunctionSignatureTemplate} from a method.
*/
interface SignatureExtraction {
    FunctionSignatureTemplate extract(BaseMappingExtractor extractor, Method method);
}

/** Extracts a {@link FunctionResultTemplate} from a class or method. */
interface ResultExtraction {
    @Nullable
    FunctionResultTemplate extract(BaseMappingExtractor extractor, Method method);
}

/** Verifies the signature of a method. */
interface MethodVerification {
    void verify(
            Method method,
            @Nullable FunctionStateTemplate state,
            FunctionSignatureTemplate arguments,
            @Nullable FunctionOutputTemplate result);
}
}
apache/lucene
36,735
lucene/core/src/java/org/apache/lucene/codecs/DocValuesConsumer.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.lucene.codecs; import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; import java.io.Closeable; import java.io.IOException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import org.apache.lucene.index.BaseTermsEnum; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.DocIDMerger; import org.apache.lucene.index.DocValues; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.EmptyDocValuesProducer; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FilteredTermsEnum; import org.apache.lucene.index.ImpactsEnum; import org.apache.lucene.index.MergeState; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.OrdinalMap; import org.apache.lucene.index.PostingsEnum; import org.apache.lucene.index.SegmentWriteState; // javadocs import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.index.TermState; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.util.AttributeSource; import org.apache.lucene.util.Bits; 
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LongBitSet;
import org.apache.lucene.util.LongValues;
import org.apache.lucene.util.packed.PackedInts;

/**
 * Abstract API that consumes numeric, binary and sorted docvalues. Concrete implementations of this
 * actually do "something" with the docvalues (write it into the index in a specific format).
 *
 * <p>The lifecycle is:
 *
 * <ol>
 *   <li>DocValuesConsumer is created by {@link DocValuesFormat#fieldsConsumer(SegmentWriteState)}.
 *   <li>{@link #addNumericField}, {@link #addBinaryField}, {@link #addSortedField}, {@link
 *       #addSortedSetField}, or {@link #addSortedNumericField} are called for each Numeric, Binary,
 *       Sorted, SortedSet, or SortedNumeric docvalues field. The API is a "pull" rather than
 *       "push", and the implementation is free to iterate over the values multiple times ({@link
 *       Iterable#iterator()}).
 *   <li>After all fields are added, the consumer is {@link #close}d.
 * </ol>
 *
 * @lucene.experimental
 */
public abstract class DocValuesConsumer implements Closeable {

  /** Sole constructor. (For invocation by subclass constructors, typically implicit.) */
  protected DocValuesConsumer() {}

  /**
   * Writes numeric docvalues for a field.
   *
   * @param field field information
   * @param valuesProducer Numeric values to write.
   * @throws IOException if an I/O error occurred.
   */
  public abstract void addNumericField(FieldInfo field, DocValuesProducer valuesProducer)
      throws IOException;

  /**
   * Writes binary docvalues for a field.
   *
   * @param field field information
   * @param valuesProducer Binary values to write.
   * @throws IOException if an I/O error occurred.
   */
  public abstract void addBinaryField(FieldInfo field, DocValuesProducer valuesProducer)
      throws IOException;

  /**
   * Writes pre-sorted binary docvalues for a field.
   *
   * @param field field information
   * @param valuesProducer produces the values and ordinals to write
   * @throws IOException if an I/O error occurred.
   */
  public abstract void addSortedField(FieldInfo field, DocValuesProducer valuesProducer)
      throws IOException;

  /**
   * Writes pre-sorted numeric docvalues for a field
   *
   * @param field field information
   * @param valuesProducer produces the values to write
   * @throws IOException if an I/O error occurred.
   */
  public abstract void addSortedNumericField(FieldInfo field, DocValuesProducer valuesProducer)
      throws IOException;

  /**
   * Writes pre-sorted set docvalues for a field
   *
   * @param field field information
   * @param valuesProducer produces the values to write
   * @throws IOException if an I/O error occurred.
   */
  public abstract void addSortedSetField(FieldInfo field, DocValuesProducer valuesProducer)
      throws IOException;

  /**
   * Merges in the fields from the readers in <code>mergeState</code>. The default implementation
   * calls {@link #mergeNumericField}, {@link #mergeBinaryField}, {@link #mergeSortedField}, {@link
   * #mergeSortedSetField}, or {@link #mergeSortedNumericField} for each field, depending on its
   * type. Implementations can override this method for more sophisticated merging (bulk-byte
   * copying, etc).
   */
  public void merge(MergeState mergeState) throws IOException {
    // Verify checksums of all segments being merged before consuming any values.
    for (DocValuesProducer docValuesProducer : mergeState.docValuesProducers) {
      if (docValuesProducer != null) {
        docValuesProducer.checkIntegrity();
      }
    }

    // Dispatch each merged field to the merge routine matching its doc values type.
    for (FieldInfo mergeFieldInfo : mergeState.mergeFieldInfos) {
      DocValuesType type = mergeFieldInfo.getDocValuesType();
      if (type != DocValuesType.NONE) {
        if (type == DocValuesType.NUMERIC) {
          mergeNumericField(mergeFieldInfo, mergeState);
        } else if (type == DocValuesType.BINARY) {
          mergeBinaryField(mergeFieldInfo, mergeState);
        } else if (type == DocValuesType.SORTED) {
          mergeSortedField(mergeFieldInfo, mergeState);
        } else if (type == DocValuesType.SORTED_SET) {
          mergeSortedSetField(mergeFieldInfo, mergeState);
        } else if (type == DocValuesType.SORTED_NUMERIC) {
          mergeSortedNumericField(mergeFieldInfo, mergeState);
        } else {
          throw new AssertionError("type=" + type);
        }
      }
    }
  }

  /** Tracks state of one numeric sub-reader that we are merging */
  private static class NumericDocValuesSub extends DocIDMerger.Sub {

    final NumericDocValues values;

    public NumericDocValuesSub(MergeState.DocMap docMap, NumericDocValues values) {
      super(docMap);
      this.values = values;
      assert values.docID() == -1;
    }

    @Override
    public int nextDoc() throws IOException {
      return values.nextDoc();
    }
  }

  /**
   * Merges the numeric docvalues from <code>MergeState</code>.
   *
   * <p>The default implementation calls {@link #addNumericField}, passing a DocValuesProducer that
   * merges and filters deleted documents on the fly.
*/
  public void mergeNumericField(final FieldInfo mergeFieldInfo, final MergeState mergeState)
      throws IOException {
    addNumericField(
        mergeFieldInfo,
        new EmptyDocValuesProducer() {
          @Override
          public NumericDocValues getNumeric(FieldInfo fieldInfo) throws IOException {
            if (fieldInfo != mergeFieldInfo) {
              throw new IllegalArgumentException("wrong fieldInfo");
            }
            return getMergedNumericDocValues(mergeState, mergeFieldInfo);
          }
        });
  }

  /**
   * Returns a merged numeric doc values instance from all producers in the provided merge state.
   *
   * @lucene.experimental
   */
  protected static NumericDocValues getMergedNumericDocValues(
      MergeState mergeState, FieldInfo mergeFieldInfo) throws IOException {
    List<NumericDocValuesSub> subs = new ArrayList<>();
    assert mergeState.docMaps.length == mergeState.docValuesProducers.length;
    for (int i = 0; i < mergeState.docValuesProducers.length; i++) {
      NumericDocValues values = null;
      DocValuesProducer docValuesProducer = mergeState.docValuesProducers[i];
      if (docValuesProducer != null) {
        // Only consider readers that actually carry this field as NUMERIC doc values.
        FieldInfo readerFieldInfo = mergeState.fieldInfos[i].fieldInfo(mergeFieldInfo.name);
        if (readerFieldInfo != null
            && readerFieldInfo.getDocValuesType() == DocValuesType.NUMERIC) {
          values = docValuesProducer.getNumeric(readerFieldInfo);
        }
      }
      if (values != null) {
        subs.add(new NumericDocValuesSub(mergeState.docMaps[i], values));
      }
    }

    return mergeNumericValues(subs, mergeState.needsIndexSort);
  }

  private static NumericDocValues mergeNumericValues(
      List<NumericDocValuesSub> subs, boolean indexIsSorted) throws IOException {
    // Total cost is the sum of all sub-reader costs.
    long cost = 0;
    for (NumericDocValuesSub sub : subs) {
      cost += sub.values.cost();
    }
    final long finalCost = cost;

    final DocIDMerger<NumericDocValuesSub> docIDMerger = DocIDMerger.of(subs, indexIsSorted);

    // Forward-only view over all subs in merged doc-ID order; advance is unsupported.
    return new NumericDocValues() {
      private int docID = -1;
      private NumericDocValuesSub current;

      @Override
      public int docID() {
        return docID;
      }

      @Override
      public int nextDoc() throws IOException {
        current = docIDMerger.next();
        if (current == null) {
          docID = NO_MORE_DOCS;
        } else {
          docID = current.mappedDocID;
        }
        return docID;
      }

      @Override
      public int advance(int target) throws IOException {
        throw new UnsupportedOperationException();
      }

      @Override
      public boolean advanceExact(int target) throws IOException {
        throw new UnsupportedOperationException();
      }

      @Override
      public long cost() {
        return finalCost;
      }

      @Override
      public long longValue() throws IOException {
        return current.values.longValue();
      }
    };
  }

  /** Tracks state of one binary sub-reader that we are merging */
  private static class BinaryDocValuesSub extends DocIDMerger.Sub {

    final BinaryDocValues values;

    public BinaryDocValuesSub(MergeState.DocMap docMap, BinaryDocValues values) {
      super(docMap);
      this.values = values;
      assert values.docID() == -1;
    }

    @Override
    public int nextDoc() throws IOException {
      return values.nextDoc();
    }
  }

  /**
   * Merges the binary docvalues from <code>MergeState</code>.
   *
   * <p>The default implementation calls {@link #addBinaryField}, passing a DocValuesProducer that
   * merges and filters deleted documents on the fly.
   */
  public void mergeBinaryField(FieldInfo mergeFieldInfo, final MergeState mergeState)
      throws IOException {
    addBinaryField(
        mergeFieldInfo,
        new EmptyDocValuesProducer() {
          @Override
          public BinaryDocValues getBinary(FieldInfo fieldInfo) throws IOException {
            if (fieldInfo != mergeFieldInfo) {
              throw new IllegalArgumentException("wrong fieldInfo");
            }
            return getMergedBinaryDocValues(mergeFieldInfo, mergeState);
          }
        });
  }

  /**
   * Returns a merged binary doc values instance from all producers in the provided merge state.
 *
 * @lucene.experimental
 */
  protected static BinaryDocValues getMergedBinaryDocValues(
      FieldInfo mergeFieldInfo, final MergeState mergeState) throws IOException {
    List<BinaryDocValuesSub> subs = new ArrayList<>();

    long cost = 0;
    for (int i = 0; i < mergeState.docValuesProducers.length; i++) {
      BinaryDocValues values = null;
      DocValuesProducer docValuesProducer = mergeState.docValuesProducers[i];
      if (docValuesProducer != null) {
        // Only consider readers that actually carry this field as BINARY doc values.
        FieldInfo readerFieldInfo = mergeState.fieldInfos[i].fieldInfo(mergeFieldInfo.name);
        if (readerFieldInfo != null
            && readerFieldInfo.getDocValuesType() == DocValuesType.BINARY) {
          values = docValuesProducer.getBinary(readerFieldInfo);
        }
      }
      if (values != null) {
        cost += values.cost();
        subs.add(new BinaryDocValuesSub(mergeState.docMaps[i], values));
      }
    }

    final DocIDMerger<BinaryDocValuesSub> docIDMerger =
        DocIDMerger.of(subs, mergeState.needsIndexSort);
    final long finalCost = cost;

    // Forward-only view over all subs in merged doc-ID order; advance is unsupported.
    return new BinaryDocValues() {
      private BinaryDocValuesSub current;
      private int docID = -1;

      @Override
      public int docID() {
        return docID;
      }

      @Override
      public int nextDoc() throws IOException {
        current = docIDMerger.next();
        if (current == null) {
          docID = NO_MORE_DOCS;
        } else {
          docID = current.mappedDocID;
        }
        return docID;
      }

      @Override
      public int advance(int target) throws IOException {
        throw new UnsupportedOperationException();
      }

      @Override
      public boolean advanceExact(int target) throws IOException {
        throw new UnsupportedOperationException();
      }

      @Override
      public long cost() {
        return finalCost;
      }

      @Override
      public BytesRef binaryValue() throws IOException {
        return current.values.binaryValue();
      }
    };
  }

  /** Tracks state of one sorted numeric sub-reader that we are merging */
  private static class SortedNumericDocValuesSub extends DocIDMerger.Sub {

    final SortedNumericDocValues values;

    public SortedNumericDocValuesSub(MergeState.DocMap docMap, SortedNumericDocValues values) {
      super(docMap);
      this.values = values;
      assert values.docID() == -1;
    }

    @Override
    public int nextDoc() throws IOException {
      return values.nextDoc();
    }
  }

  /**
   * Merges the sorted docvalues from <code>toMerge</code>.
   *
   * <p>The default implementation calls {@link #addSortedNumericField}, passing iterables that
   * filter deleted documents.
   */
  public void mergeSortedNumericField(FieldInfo mergeFieldInfo, final MergeState mergeState)
      throws IOException {
    addSortedNumericField(
        mergeFieldInfo,
        new EmptyDocValuesProducer() {
          @Override
          public SortedNumericDocValues getSortedNumeric(FieldInfo fieldInfo) throws IOException {
            if (fieldInfo != mergeFieldInfo) {
              throw new IllegalArgumentException("wrong FieldInfo");
            }
            return getMergedSortedNumericDocValues(mergeFieldInfo, mergeState);
          }
        });
  }

  /**
   * Returns a merged sorted numeric doc values instance from all producers in the provided merge
   * state.
   *
   * @lucene.experimental
   */
  protected static SortedNumericDocValues getMergedSortedNumericDocValues(
      FieldInfo mergeFieldInfo, MergeState mergeState) throws IOException {

    // We must make new iterators + DocIDMerger for each iterator:
    List<SortedNumericDocValuesSub> subs = new ArrayList<>();
    long cost = 0;
    boolean allSingletons = true;
    for (int i = 0; i < mergeState.docValuesProducers.length; i++) {
      DocValuesProducer docValuesProducer = mergeState.docValuesProducers[i];
      SortedNumericDocValues values = null;
      if (docValuesProducer != null) {
        FieldInfo readerFieldInfo = mergeState.fieldInfos[i].fieldInfo(mergeFieldInfo.name);
        if (readerFieldInfo != null
            && readerFieldInfo.getDocValuesType() == DocValuesType.SORTED_NUMERIC) {
          values = docValuesProducer.getSortedNumeric(readerFieldInfo);
        }
      }
      if (values == null) {
        // Missing field in this reader: substitute an empty iterator so every reader is
        // represented by a sub.
        values = DocValues.emptySortedNumeric();
      }
      cost += values.cost();
      if (allSingletons && DocValues.unwrapSingleton(values) == null) {
        allSingletons = false;
      }
      subs.add(new SortedNumericDocValuesSub(mergeState.docMaps[i], values));
    }

    if (allSingletons) {
      // All subs are single-valued.
      // We specialize for that case since it makes it easier for codecs to optimize
      // for single-valued fields.
      List<NumericDocValuesSub> singleValuedSubs = new ArrayList<>();
      for (SortedNumericDocValuesSub sub : subs) {
        final NumericDocValues singleValuedValues = DocValues.unwrapSingleton(sub.values);
        assert singleValuedValues != null;
        singleValuedSubs.add(new NumericDocValuesSub(sub.docMap, singleValuedValues));
      }
      return DocValues.singleton(mergeNumericValues(singleValuedSubs, mergeState.needsIndexSort));
    }

    final long finalCost = cost;
    final DocIDMerger<SortedNumericDocValuesSub> docIDMerger =
        DocIDMerger.of(subs, mergeState.needsIndexSort);

    // Forward-only view over all subs in merged doc-ID order; advance is unsupported.
    return new SortedNumericDocValues() {
      private int docID = -1;
      private SortedNumericDocValuesSub currentSub;

      @Override
      public int docID() {
        return docID;
      }

      @Override
      public int nextDoc() throws IOException {
        currentSub = docIDMerger.next();
        if (currentSub == null) {
          docID = NO_MORE_DOCS;
        } else {
          docID = currentSub.mappedDocID;
        }

        return docID;
      }

      @Override
      public int advance(int target) throws IOException {
        throw new UnsupportedOperationException();
      }

      @Override
      public boolean advanceExact(int target) throws IOException {
        throw new UnsupportedOperationException();
      }

      @Override
      public int docValueCount() {
        return currentSub.values.docValueCount();
      }

      @Override
      public long cost() {
        return finalCost;
      }

      @Override
      public long nextValue() throws IOException {
        return currentSub.values.nextValue();
      }
    };
  }

  /**
   * A merged {@link TermsEnum}. This helps avoid relying on the default terms enum, which calls
   * {@link SortedDocValues#lookupOrd(int)} or {@link SortedSetDocValues#lookupOrd(long)} on every
   * call to {@link TermsEnum#next()}.
*/ private static class MergedTermsEnum extends BaseTermsEnum { private final TermsEnum[] subs; private final OrdinalMap ordinalMap; private final long valueCount; private long ord = -1; private BytesRef term; MergedTermsEnum(OrdinalMap ordinalMap, TermsEnum[] subs) { this.ordinalMap = ordinalMap; this.subs = subs; this.valueCount = ordinalMap.getValueCount(); } @Override public BytesRef term() throws IOException { return term; } @Override public long ord() throws IOException { return ord; } @Override public BytesRef next() throws IOException { if (++ord >= valueCount) { return null; } final int subNum = ordinalMap.getFirstSegmentNumber(ord); final TermsEnum sub = subs[subNum]; final long subOrd = ordinalMap.getFirstSegmentOrd(ord); do { term = sub.next(); } while (sub.ord() < subOrd); assert sub.ord() == subOrd; return term; } @Override public AttributeSource attributes() { throw new UnsupportedOperationException(); } @Override public SeekStatus seekCeil(BytesRef text) throws IOException { throw new UnsupportedOperationException(); } @Override public void seekExact(long ord) throws IOException { throw new UnsupportedOperationException(); } @Override public int docFreq() throws IOException { throw new UnsupportedOperationException(); } @Override public long totalTermFreq() throws IOException { throw new UnsupportedOperationException(); } @Override public PostingsEnum postings(PostingsEnum reuse, int flags) throws IOException { throw new UnsupportedOperationException(); } @Override public ImpactsEnum impacts(int flags) throws IOException { throw new UnsupportedOperationException(); } @Override public TermState termState() throws IOException { throw new UnsupportedOperationException(); } } /** Tracks state of one sorted sub-reader that we are merging */ private static class SortedDocValuesSub extends DocIDMerger.Sub { final SortedDocValues values; final LongValues map; public SortedDocValuesSub(MergeState.DocMap docMap, SortedDocValues values, LongValues map) { 
super(docMap); this.values = values; this.map = map; assert values.docID() == -1; } @Override public int nextDoc() throws IOException { return values.nextDoc(); } } /** * Merges the sorted docvalues from <code>toMerge</code>. * * <p>The default implementation calls {@link #addSortedField}, passing an Iterable that merges * ordinals and values and filters deleted documents . */ public void mergeSortedField(FieldInfo fieldInfo, final MergeState mergeState) throws IOException { // step 1: iterate thru each sub and mark terms still in use // step 2: create ordinal map (this conceptually does the "merging") final OrdinalMap map = createOrdinalMapForSortedDV(fieldInfo, mergeState); // step 3: add field addSortedField( fieldInfo, new EmptyDocValuesProducer() { @Override public SortedDocValues getSorted(FieldInfo fieldInfoIn) throws IOException { if (fieldInfoIn != fieldInfo) { throw new IllegalArgumentException("wrong FieldInfo"); } return getMergedSortedSetDocValues(fieldInfo, mergeState, map); } }); } /** * Returns a merged sorted doc values instance from all producers in the provided merge state. 
* * @lucene.experimental */ protected static OrdinalMap createOrdinalMapForSortedDV( FieldInfo fieldInfo, MergeState mergeState) throws IOException { List<SortedDocValues> toMerge = new ArrayList<>(); for (int i = 0; i < mergeState.docValuesProducers.length; i++) { SortedDocValues values = null; DocValuesProducer docValuesProducer = mergeState.docValuesProducers[i]; if (docValuesProducer != null) { FieldInfo readerFieldInfo = mergeState.fieldInfos[i].fieldInfo(fieldInfo.name); if (readerFieldInfo != null && readerFieldInfo.getDocValuesType() == DocValuesType.SORTED) { values = docValuesProducer.getSorted(readerFieldInfo); } } if (values == null) { values = DocValues.emptySorted(); } toMerge.add(values); } final int numReaders = toMerge.size(); final SortedDocValues[] dvs = toMerge.toArray(new SortedDocValues[numReaders]); TermsEnum[] liveTerms = new TermsEnum[dvs.length]; long[] weights = new long[liveTerms.length]; for (int sub = 0; sub < numReaders; sub++) { SortedDocValues dv = dvs[sub]; Bits liveDocs = mergeState.liveDocs[sub]; if (liveDocs == null) { liveTerms[sub] = dv.termsEnum(); weights[sub] = dv.getValueCount(); } else { LongBitSet bitset = new LongBitSet(dv.getValueCount()); int docID; while ((docID = dv.nextDoc()) != NO_MORE_DOCS) { if (liveDocs.get(docID)) { int ord = dv.ordValue(); if (ord >= 0) { bitset.set(ord); } } } liveTerms[sub] = new BitsFilteredTermsEnum(dv.termsEnum(), bitset); weights[sub] = bitset.cardinality(); } } return OrdinalMap.build(null, liveTerms, weights, PackedInts.COMPACT); } /** * Returns a merged sorted doc values instance from all producers in the provided merge state. 
* * @lucene.experimental */ protected static SortedDocValues getMergedSortedSetDocValues( FieldInfo fieldInfo, MergeState mergeState, OrdinalMap map) throws IOException { // We must make new iterators + DocIDMerger for each iterator: List<SortedDocValuesSub> subs = new ArrayList<>(); for (int i = 0; i < mergeState.docValuesProducers.length; i++) { SortedDocValues values = null; DocValuesProducer docValuesProducer = mergeState.docValuesProducers[i]; if (docValuesProducer != null) { FieldInfo readerFieldInfo = mergeState.fieldInfos[i].fieldInfo(fieldInfo.name); if (readerFieldInfo != null && readerFieldInfo.getDocValuesType() == DocValuesType.SORTED) { values = docValuesProducer.getSorted(readerFieldInfo); } } if (values == null) { values = DocValues.emptySorted(); } subs.add(new SortedDocValuesSub(mergeState.docMaps[i], values, map.getGlobalOrds(i))); } return mergeSortedValues(subs, mergeState, map); } /** * Returns a merged sorted doc values instance from all producers in the provided merge state. 
* * @lucene.experimental */ protected static SortedDocValues mergeSortedValues( List<SortedDocValuesSub> subs, MergeState mergeState, OrdinalMap map) throws IOException { long cost = 0; for (SortedDocValuesSub sub : subs) { cost += sub.values.cost(); } final long finalCost = cost; final DocIDMerger<SortedDocValuesSub> docIDMerger = DocIDMerger.of(subs, mergeState.needsIndexSort); return new SortedDocValues() { private int docID = -1; private SortedDocValuesSub current; @Override public int docID() { return docID; } @Override public int nextDoc() throws IOException { current = docIDMerger.next(); if (current == null) { docID = NO_MORE_DOCS; } else { docID = current.mappedDocID; } return docID; } @Override public int ordValue() throws IOException { int subOrd = current.values.ordValue(); assert subOrd != -1; return (int) current.map.get(subOrd); } @Override public int advance(int target) { throw new UnsupportedOperationException(); } @Override public boolean advanceExact(int target) throws IOException { throw new UnsupportedOperationException(); } @Override public long cost() { return finalCost; } @Override public int getValueCount() { return (int) map.getValueCount(); } @Override public BytesRef lookupOrd(int ord) throws IOException { int segmentNumber = map.getFirstSegmentNumber(ord); int segmentOrd = (int) map.getFirstSegmentOrd(ord); return subs.get(segmentNumber).values.lookupOrd(segmentOrd); } @Override public TermsEnum termsEnum() throws IOException { TermsEnum[] termsEnurmSubs = new TermsEnum[subs.size()]; for (int sub = 0; sub < termsEnurmSubs.length; ++sub) { termsEnurmSubs[sub] = subs.get(sub).values.termsEnum(); } return new MergedTermsEnum(map, termsEnurmSubs); } }; } /** Tracks state of one sorted set sub-reader that we are merging */ private static class SortedSetDocValuesSub extends DocIDMerger.Sub { final SortedSetDocValues values; final LongValues map; public SortedSetDocValuesSub( MergeState.DocMap docMap, SortedSetDocValues values, LongValues map) 
{ super(docMap); this.values = values; this.map = map; assert values.docID() == -1; } @Override public int nextDoc() throws IOException { return values.nextDoc(); } @Override public String toString() { return "SortedSetDocValuesSub(mappedDocID=" + mappedDocID + " values=" + values + ")"; } } /** * Merges the sortedset docvalues from <code>toMerge</code>. * * <p>The default implementation calls {@link #addSortedSetField}, passing an Iterable that merges * ordinals and values and filters deleted documents . */ public void mergeSortedSetField(FieldInfo mergeFieldInfo, final MergeState mergeState) throws IOException { // step 1: iterate thru each sub and mark terms still in use // step 2: create ordinal map (this conceptually does the "merging") List<SortedSetDocValues> toMerge = selectLeavesToMerge(mergeFieldInfo, mergeState); OrdinalMap map = createOrdinalMapForSortedSetDV(toMerge, mergeState); // step 3: add field addSortedSetField( mergeFieldInfo, new EmptyDocValuesProducer() { @Override public SortedSetDocValues getSortedSet(FieldInfo fieldInfo) throws IOException { if (fieldInfo != mergeFieldInfo) { throw new IllegalArgumentException("wrong FieldInfo"); } return getMergedSortedSetDocValues(mergeFieldInfo, mergeState, map, toMerge); } }); } /** * Creates an ordinal map based on the provided sorted set doc values to merges * * @lucene.experimental */ protected static OrdinalMap createOrdinalMapForSortedSetDV( List<SortedSetDocValues> toMerge, MergeState mergeState) throws IOException { TermsEnum[] liveTerms = new TermsEnum[toMerge.size()]; long[] weights = new long[liveTerms.length]; for (int sub = 0; sub < liveTerms.length; sub++) { SortedSetDocValues dv = toMerge.get(sub); Bits liveDocs = mergeState.liveDocs[sub]; if (liveDocs == null) { liveTerms[sub] = dv.termsEnum(); weights[sub] = dv.getValueCount(); } else { LongBitSet bitset = new LongBitSet(dv.getValueCount()); int docID; while ((docID = dv.nextDoc()) != NO_MORE_DOCS) { if (liveDocs.get(docID)) { for (int 
i = 0; i < dv.docValueCount(); i++) { bitset.set(dv.nextOrd()); } } } liveTerms[sub] = new BitsFilteredTermsEnum(dv.termsEnum(), bitset); weights[sub] = bitset.cardinality(); } } return OrdinalMap.build(null, liveTerms, weights, PackedInts.COMPACT); } /** * Selects the sorted set doc values to merge. * * @lucene.experimental */ protected static List<SortedSetDocValues> selectLeavesToMerge( FieldInfo mergeFieldInfo, MergeState mergeState) throws IOException { List<SortedSetDocValues> toMerge = new ArrayList<>(); for (int i = 0; i < mergeState.docValuesProducers.length; i++) { SortedSetDocValues values = null; DocValuesProducer docValuesProducer = mergeState.docValuesProducers[i]; if (docValuesProducer != null) { FieldInfo fieldInfo = mergeState.fieldInfos[i].fieldInfo(mergeFieldInfo.name); if (fieldInfo != null && fieldInfo.getDocValuesType() == DocValuesType.SORTED_SET) { values = docValuesProducer.getSortedSet(fieldInfo); } } if (values == null) { values = DocValues.emptySortedSet(); } toMerge.add(values); } return toMerge; } /** * Returns a sorted set doc values instance from all producers in the provided merge state. 
* * @lucene.experimental */ protected static SortedSetDocValues getMergedSortedSetDocValues( FieldInfo mergeFieldInfo, MergeState mergeState, OrdinalMap map, List<SortedSetDocValues> toMerge) throws IOException { // We must make new iterators + DocIDMerger for each iterator: List<SortedSetDocValuesSub> subs = new ArrayList<>(); long cost = 0; boolean allSingletons = true; for (int i = 0; i < mergeState.docValuesProducers.length; i++) { SortedSetDocValues values = null; DocValuesProducer docValuesProducer = mergeState.docValuesProducers[i]; if (docValuesProducer != null) { FieldInfo readerFieldInfo = mergeState.fieldInfos[i].fieldInfo(mergeFieldInfo.name); if (readerFieldInfo != null && readerFieldInfo.getDocValuesType() == DocValuesType.SORTED_SET) { values = docValuesProducer.getSortedSet(readerFieldInfo); } } if (values == null) { values = DocValues.emptySortedSet(); } cost += values.cost(); if (allSingletons && DocValues.unwrapSingleton(values) == null) { allSingletons = false; } subs.add(new SortedSetDocValuesSub(mergeState.docMaps[i], values, map.getGlobalOrds(i))); } if (allSingletons) { // All subs are single-valued. // We specialize for that case since it makes it easier for codecs to optimize // for single-valued fields. 
List<SortedDocValuesSub> singleValuedSubs = new ArrayList<>(); for (SortedSetDocValuesSub sub : subs) { final SortedDocValues singleValuedValues = DocValues.unwrapSingleton(sub.values); assert singleValuedValues != null; singleValuedSubs.add(new SortedDocValuesSub(sub.docMap, singleValuedValues, sub.map)); } return DocValues.singleton(mergeSortedValues(singleValuedSubs, mergeState, map)); } final DocIDMerger<SortedSetDocValuesSub> docIDMerger = DocIDMerger.of(subs, mergeState.needsIndexSort); final long finalCost = cost; return new SortedSetDocValues() { private int docID = -1; private SortedSetDocValuesSub currentSub; @Override public int docID() { return docID; } @Override public int nextDoc() throws IOException { currentSub = docIDMerger.next(); if (currentSub == null) { docID = NO_MORE_DOCS; } else { docID = currentSub.mappedDocID; } return docID; } @Override public int advance(int target) throws IOException { throw new UnsupportedOperationException(); } @Override public boolean advanceExact(int target) throws IOException { throw new UnsupportedOperationException(); } @Override public long nextOrd() throws IOException { long subOrd = currentSub.values.nextOrd(); return currentSub.map.get(subOrd); } @Override public int docValueCount() { return currentSub.values.docValueCount(); } @Override public long cost() { return finalCost; } @Override public BytesRef lookupOrd(long ord) throws IOException { int segmentNumber = map.getFirstSegmentNumber(ord); long segmentOrd = map.getFirstSegmentOrd(ord); return toMerge.get(segmentNumber).lookupOrd(segmentOrd); } @Override public long getValueCount() { return map.getValueCount(); } @Override public TermsEnum termsEnum() throws IOException { TermsEnum[] subs = new TermsEnum[toMerge.size()]; for (int sub = 0; sub < subs.length; ++sub) { subs[sub] = toMerge.get(sub).termsEnum(); } return new MergedTermsEnum(map, subs); } }; } // TODO: seek-by-ord to nextSetBit static class BitsFilteredTermsEnum extends FilteredTermsEnum { 
final LongBitSet liveTerms; BitsFilteredTermsEnum(TermsEnum in, LongBitSet liveTerms) { super(in, false); // <-- not passing false here wasted about 3 hours of my time!!!!!!!!!!!!! assert liveTerms != null; this.liveTerms = liveTerms; } @Override protected AcceptStatus accept(BytesRef term) throws IOException { if (liveTerms.get(ord())) { return AcceptStatus.YES; } else { return AcceptStatus.NO; } } } /** Helper: returns true if the given docToValue count contains only at most one value */ public static boolean isSingleValued(Iterable<Number> docToValueCount) { for (Number count : docToValueCount) { if (count.longValue() > 1) { return false; } } return true; } /** Helper: returns single-valued view, using {@code missingValue} when count is zero */ public static Iterable<Number> singletonView( final Iterable<Number> docToValueCount, final Iterable<Number> values, final Number missingValue) { assert isSingleValued(docToValueCount); return new Iterable<>() { @Override public Iterator<Number> iterator() { final Iterator<Number> countIterator = docToValueCount.iterator(); final Iterator<Number> valuesIterator = values.iterator(); return new Iterator<>() { @Override public boolean hasNext() { return countIterator.hasNext(); } @Override public Number next() { int count = countIterator.next().intValue(); if (count == 0) { return missingValue; } else { return valuesIterator.next(); } } @Override public void remove() { throw new UnsupportedOperationException(); } }; } }; } }
apache/dolphinscheduler
36,770
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.api.service; import static org.apache.dolphinscheduler.api.AssertionsHelper.assertDoesNotThrow; import static org.apache.dolphinscheduler.api.AssertionsHelper.assertThrowsServiceException; import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.USER_MANAGER; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.times; import static org.mockito.Mockito.when; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.permission.ResourcePermissionCheckService; import org.apache.dolphinscheduler.api.service.impl.BaseServiceImpl; import org.apache.dolphinscheduler.api.service.impl.UsersServiceImpl; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.constants.Constants; import org.apache.dolphinscheduler.common.enums.AuthorizationType; import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.common.utils.EncryptionUtils; import org.apache.dolphinscheduler.dao.entity.AlertGroup; import 
org.apache.dolphinscheduler.dao.entity.Project; import org.apache.dolphinscheduler.dao.entity.Tenant; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.AccessTokenMapper; import org.apache.dolphinscheduler.dao.mapper.AlertGroupMapper; import org.apache.dolphinscheduler.dao.mapper.DataSourceUserMapper; import org.apache.dolphinscheduler.dao.mapper.K8sNamespaceUserMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectUserMapper; import org.apache.dolphinscheduler.dao.mapper.TenantMapper; import org.apache.dolphinscheduler.dao.mapper.UserMapper; import org.apache.dolphinscheduler.plugin.storage.api.StorageOperator; import org.apache.commons.collections4.CollectionUtils; import java.util.ArrayList; import java.util.List; import java.util.Map; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; import org.mockito.junit.jupiter.MockitoSettings; import org.mockito.quality.Strictness; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import com.google.common.collect.Lists; /** * users service test */ @ExtendWith(MockitoExtension.class) @MockitoSettings(strictness = Strictness.LENIENT) public class UsersServiceTest { private static final Logger logger = LoggerFactory.getLogger(UsersServiceTest.class); @InjectMocks private UsersServiceImpl usersService; @Mock private UserMapper userMapper; @Mock private AccessTokenMapper accessTokenMapper; @Mock private TenantMapper tenantMapper; @Mock private AlertGroupMapper alertGroupMapper; @Mock private 
DataSourceUserMapper datasourceUserMapper; @Mock private ProjectUserMapper projectUserMapper; @Mock private MetricsCleanUpService metricsCleanUpService; @Mock private K8sNamespaceUserMapper k8sNamespaceUserMapper; @Mock private ProjectMapper projectMapper; @Mock private StorageOperator storageOperator; @Mock private ResourcePermissionCheckService resourcePermissionCheckService; @Mock private SessionService sessionService; private String queueName = "UsersServiceTestQueue"; private static final Logger serviceLogger = LoggerFactory.getLogger(BaseServiceImpl.class); @BeforeEach public void before() { Mockito.when(resourcePermissionCheckService.functionDisabled()).thenReturn(false); } @AfterEach public void after() { } @Test public void testCreateUserForLdap() { String userName = "user1"; String email = "user1@ldap.com"; User user = usersService.createUser(UserType.ADMIN_USER, userName, email); Assertions.assertNotNull(user); } @Test public void testCreateUser() { User user = new User(); user.setUserType(UserType.ADMIN_USER); String userName = "userTest0001~"; String userPassword = "userTest"; String email = "123@qq.com"; int tenantId = Integer.MAX_VALUE; String phone = "13456432345"; int state = 1; try { // userName error Map<String, Object> result = usersService.createUser(user, userName, userPassword, email, tenantId, phone, queueName, state); logger.info(result.toString()); Assertions.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS)); userName = "userTest0001"; userPassword = "userTest000111111111111111"; // password error result = usersService.createUser(user, userName, userPassword, email, tenantId, phone, queueName, state); logger.info(result.toString()); Assertions.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS)); userPassword = "userTest0001"; email = "1q.com"; // email error result = usersService.createUser(user, userName, userPassword, email, tenantId, phone, queueName, state); 
logger.info(result.toString()); Assertions.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS)); email = "122222@qq.com"; phone = "2233"; // phone error result = usersService.createUser(user, userName, userPassword, email, tenantId, phone, queueName, state); logger.info(result.toString()); Assertions.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS)); phone = "13456432345"; // tenantId not exists result = usersService.createUser(user, userName, userPassword, email, tenantId, phone, queueName, state); logger.info(result.toString()); Assertions.assertEquals(Status.TENANT_NOT_EXIST, result.get(Constants.STATUS)); // success Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant()); result = usersService.createUser(user, userName, userPassword, email, 1, phone, queueName, state); logger.info(result.toString()); Assertions.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); } catch (Exception e) { logger.error(Status.CREATE_USER_ERROR.getMsg(), e); Assertions.assertTrue(false); } } @Test public void testQueryUser() { String userName = "userTest0001"; String userPassword = "userTest0001"; when(userMapper.queryUserByNamePassword(userName, EncryptionUtils.getMd5(userPassword))) .thenReturn(getGeneralUser()); User queryUser = usersService.queryUser(userName, userPassword); logger.info(queryUser.toString()); Assertions.assertTrue(queryUser != null); } @Test public void testSelectByIds() { List<Integer> ids = new ArrayList<>(); List<User> users = usersService.queryUser(ids); Assertions.assertTrue(users.isEmpty()); ids.add(1); List<User> userList = new ArrayList<>(); userList.add(new User()); when(userMapper.selectByIds(ids)).thenReturn(userList); List<User> userList1 = usersService.queryUser(ids); Assertions.assertFalse(userList1.isEmpty()); } @Test public void testGetUserIdByName() { User user = new User(); user.setId(1); user.setUserType(UserType.ADMIN_USER); user.setUserName("test_user"); // user 
name null int userId = usersService.getUserIdByName(""); Assertions.assertEquals(0, userId); // user not exist when(usersService.queryUser(user.getUserName())).thenReturn(null); int userNotExistId = usersService.getUserIdByName(user.getUserName()); Assertions.assertEquals(-1, userNotExistId); // user exist when(usersService.queryUser(user.getUserName())).thenReturn(user); Integer userExistId = usersService.getUserIdByName(user.getUserName()); Assertions.assertEquals(user.getId(), userExistId); } @Test public void testQueryUserList() { User user = new User(); user.setUserType(UserType.ADMIN_USER); user.setId(1); Mockito.when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.ACCESS_TOKEN, 1, USER_MANAGER, serviceLogger)).thenReturn(true); Mockito.when(resourcePermissionCheckService.resourcePermissionCheck(AuthorizationType.ACCESS_TOKEN, null, 0, serviceLogger)).thenReturn(false); Map<String, Object> result = usersService.queryUserList(user); logger.info(result.toString()); Assertions.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS)); // success Mockito.when(resourcePermissionCheckService.operationPermissionCheck(AuthorizationType.ACCESS_TOKEN, 1, USER_MANAGER, serviceLogger)).thenReturn(true); Mockito.when(resourcePermissionCheckService.resourcePermissionCheck(AuthorizationType.ACCESS_TOKEN, null, 0, serviceLogger)).thenReturn(true); user.setUserType(UserType.ADMIN_USER); when(userMapper.queryEnabledUsers()).thenReturn(getUserList()); result = usersService.queryUserList(user); List<User> userList = (List<User>) result.get(Constants.DATA_LIST); Assertions.assertTrue(userList.size() > 0); } @Test public void testQueryUserListPage() { User user = new User(); IPage<User> page = new Page<>(1, 10); page.setRecords(getUserList()); when(userMapper.queryUserPaging(any(Page.class), eq("userTest"))).thenReturn(page); // no operate Result result = usersService.queryUserList(user, "userTest", 1, 10); logger.info(result.toString()); 
Assertions.assertEquals(Status.USER_NO_OPERATION_PERM.getCode(), (int) result.getCode()); // success user.setUserType(UserType.ADMIN_USER); result = usersService.queryUserList(user, "userTest", 1, 10); Assertions.assertEquals(Status.SUCCESS.getCode(), (int) result.getCode()); PageInfo<User> pageInfo = (PageInfo<User>) result.getData(); Assertions.assertTrue(pageInfo.getTotalList().size() > 0); } @Test public void testUpdateUser() { String userName = "userTest0001"; String userPassword = "userTest0001"; // user not exist assertThrowsServiceException( Status.USER_NOT_EXIST, () -> usersService.updateUser(getLoginUser(), 0, userName, userPassword, "3443@qq.com", 1, "13457864543", "queue", 1, "Asia/Shanghai")); // success when(userMapper.selectById(any())).thenReturn(getUser()); when(userMapper.updateById(any())).thenReturn(1); assertDoesNotThrow(() -> usersService.updateUser(getLoginUser(), 1, userName, userPassword, "32222s@qq.com", 1, "13457864543", "queue", 1, "Asia/Shanghai")); // non-admin should not modify tenantId and queue when(userMapper.selectById(2)).thenReturn(getNonAdminUser()); User user = userMapper.selectById(2); assertThrowsServiceException(Status.USER_NO_OPERATION_PERM, () -> usersService.updateUser(user, 2, userName, userPassword, "abc@qq.com", null, "13457864543", "offline", 1, "Asia/Shanghai")); } @Test public void testDeleteUserById() { User loginUser = new User(); try { when(userMapper.queryTenantCodeByUserId(1)).thenReturn(getUser()); when(userMapper.selectById(1)).thenReturn(getUser()); when(userMapper.deleteById(1)).thenReturn(1); // no operate Map<String, Object> result = usersService.deleteUserById(loginUser, 3); logger.info(result.toString()); Assertions.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS)); // user not exist loginUser.setUserType(UserType.ADMIN_USER); result = usersService.deleteUserById(loginUser, 3); logger.info(result.toString()); Assertions.assertEquals(Status.USER_NOT_EXIST, 
// NOTE(review): this chunk begins mid-way through testDeleteUserById(); the lines below are the
// tail of that test's try-block. Code is byte-identical — only comments and formatting were added.
            result.get(Constants.STATUS));
            // user is project owner: deletion must be blocked until ownership is transferred
            Mockito.when(projectMapper.queryProjectCreatedByUser(1)).thenReturn(Lists.newArrayList(new Project()));
            result = usersService.deleteUserById(loginUser, 1);
            Assertions.assertEquals(Status.TRANSFORM_PROJECT_OWNERSHIP, result.get(Constants.STATUS));
            // success: user owns no projects, so deletion proceeds and metrics are cleaned up
            Mockito.when(projectMapper.queryProjectCreatedByUser(1)).thenReturn(null);
            Mockito.doNothing().when(metricsCleanUpService).cleanUpApiResponseTimeMetricsByUserId(Mockito.anyInt());
            result = usersService.deleteUserById(loginUser, 1);
            logger.info(result.toString());
            Assertions.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
            Mockito.verify(metricsCleanUpService, times(1)).cleanUpApiResponseTimeMetricsByUserId(Mockito.anyInt());
        } catch (Exception e) {
            logger.error("delete user error", e);
            // NOTE(review): Assertions.fail(e) would report the cause instead of a bare assertion failure
            Assertions.assertTrue(false);
        }
    }

    /**
     * grantProject: covers USER_NOT_EXIST, SUCCESS, and the non-admin permission-denied path.
     */
    @Test
    public void testGrantProject() {
        String projectIds = "100000,120000";
        User loginUser = new User();
        int userId = 3;
        // user not exist
        loginUser.setId(1);
        loginUser.setUserType(UserType.ADMIN_USER);
        when(userMapper.selectById(userId)).thenReturn(null);
        Map<String, Object> result = usersService.grantProject(loginUser, userId, projectIds);
        logger.info(result.toString());
        Assertions.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));
        // SUCCESS
        when(userMapper.selectById(userId)).thenReturn(getUser());
        result = usersService.grantProject(loginUser, userId, projectIds);
        logger.info(result.toString());
        Assertions.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
        // ERROR: NO_CURRENT_OPERATING_PERMISSION
        loginUser.setId(3);
        loginUser.setUserType(UserType.GENERAL_USER);
        when(userMapper.selectById(3)).thenReturn(loginUser);
        result = this.usersService.grantProject(loginUser, userId, projectIds);
        logger.info(result.toString());
        Assertions.assertEquals(Status.NO_CURRENT_OPERATING_PERMISSION, result.get(Constants.STATUS));
    }

    /**
     * grantProjectWithReadPerm: same three paths as testGrantProject, for the read-permission variant.
     */
    @Test
    public void testGrantProjectWithReadPerm() {
        String projectIds = "100000,120000";
        User loginUser = new User();
        int userId = 3;
        // user not exist
        loginUser.setId(1);
        loginUser.setUserType(UserType.ADMIN_USER);
        when(userMapper.selectById(userId)).thenReturn(null);
        Map<String, Object> result = usersService.grantProjectWithReadPerm(loginUser, userId, projectIds);
        logger.info(result.toString());
        Assertions.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));
        // SUCCESS
        when(userMapper.selectById(userId)).thenReturn(getUser());
        result = usersService.grantProjectWithReadPerm(loginUser, userId, projectIds);
        logger.info(result.toString());
        Assertions.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
        // ERROR: NO_CURRENT_OPERATING_PERMISSION
        loginUser.setId(3);
        loginUser.setUserType(UserType.GENERAL_USER);
        when(userMapper.selectById(3)).thenReturn(loginUser);
        result = this.usersService.grantProjectWithReadPerm(loginUser, userId, projectIds);
        logger.info(result.toString());
        Assertions.assertEquals(Status.NO_CURRENT_OPERATING_PERMISSION, result.get(Constants.STATUS));
    }

    /**
     * grantProjectByCode: user-not-exist, project-not-found, permission-denied, and the two
     * success paths (project owner, administrator).
     */
    @Test
    public void testGrantProjectByCode() {
        // Mock Project, User
        final long projectCode = 1L;
        final int projectCreator = 1;
        final int authorizer = 100;
        Mockito.when(this.userMapper.selectById(authorizer)).thenReturn(this.getUser());
        Mockito.when(this.userMapper.selectById(projectCreator)).thenReturn(this.getUser());
        Mockito.when(this.projectMapper.queryByCode(projectCode)).thenReturn(this.getProject());
        // ERROR: USER_NOT_EXIST
        User loginUser = new User();
        Map<String, Object> result = this.usersService.grantProjectByCode(loginUser, 999, projectCode);
        logger.info(result.toString());
        Assertions.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));
        // ERROR: PROJECT_NOT_FOUND
        result = this.usersService.grantProjectByCode(loginUser, authorizer, 999);
        logger.info(result.toString());
        Assertions.assertEquals(Status.PROJECT_NOT_FOUND, result.get(Constants.STATUS));
        // ERROR: USER_NO_OPERATION_PERM
        loginUser.setId(999);
        loginUser.setUserType(UserType.GENERAL_USER);
        result = this.usersService.grantProjectByCode(loginUser, authorizer, projectCode);
        logger.info(result.toString());
        Assertions.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
        // SUCCESS: USER IS PROJECT OWNER
        loginUser.setId(projectCreator);
        loginUser.setUserType(UserType.GENERAL_USER);
        result = this.usersService.grantProjectByCode(loginUser, authorizer, projectCode);
        logger.info(result.toString());
        Assertions.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
        // SUCCESS: USER IS ADMINISTRATOR
        loginUser.setId(999);
        loginUser.setUserType(UserType.ADMIN_USER);
        result = this.usersService.grantProjectByCode(loginUser, authorizer, projectCode);
        logger.info(result.toString());
        Assertions.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
    }

    /**
     * revokeProject: no-permission, user-not-exist, and success paths.
     */
    @Test
    public void testRevokeProject() {
        Mockito.when(this.userMapper.selectById(1)).thenReturn(this.getUser());
        final long projectCode = 3682329499136L;
        // user no permission
        User loginUser = new User();
        loginUser.setId(0);
        Map<String, Object> result = this.usersService.revokeProject(loginUser, 1, projectCode);
        logger.info(result.toString());
        Assertions.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
        // user not exist
        loginUser.setUserType(UserType.ADMIN_USER);
        result = this.usersService.revokeProject(loginUser, 2, projectCode);
        logger.info(result.toString());
        Assertions.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));
        // success
        Project project = new Project();
        project.setId(0);
        Mockito.when(this.projectMapper.queryByCode(Mockito.anyLong())).thenReturn(project);
        result = this.usersService.revokeProject(loginUser, 1, projectCode);
        logger.info(result.toString());
        Assertions.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
    }

    /**
     * revokeProjectById: no-permission, user-not-exist, and success paths.
     */
    @Test
    public void testRevokeProjectById() {
        Mockito.when(this.userMapper.selectById(1)).thenReturn(this.getUser());
        String projectId = "100000";
        // user no permission
        User loginUser = new User();
        Map<String, Object> result = this.usersService.revokeProjectById(loginUser, 1, projectId);
        logger.info(result.toString());
        Assertions.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
        // user not exist
        loginUser.setUserType(UserType.ADMIN_USER);
        result = this.usersService.revokeProjectById(loginUser, 2, projectId);
        logger.info(result.toString());
        Assertions.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));
        // success
        Mockito.when(this.projectMapper.queryByCode(Mockito.anyLong())).thenReturn(new Project());
        result = this.usersService.revokeProjectById(loginUser, 1, projectId);
        logger.info(result.toString());
        Assertions.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
    }

    /**
     * grantNamespaces: user-not-exist and success paths for k8s namespace grants.
     */
    @Test
    public void testGrantNamespaces() {
        String namespaceIds = "100000,120000";
        when(userMapper.selectById(1)).thenReturn(getUser());
        User loginUser = new User();
        // user not exist
        loginUser.setUserType(UserType.ADMIN_USER);
        Map<String, Object> result = usersService.grantNamespaces(loginUser, 2, namespaceIds);
        logger.info(result.toString());
        Assertions.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));
        // success
        when(k8sNamespaceUserMapper.deleteNamespaceRelation(0, 1)).thenReturn(1);
        result = usersService.grantNamespaces(loginUser, 1, namespaceIds);
        logger.info(result.toString());
        Assertions.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
    }

    /**
     * grantDataSource: user-not-exist, admin success, and non-admin permission-denied paths.
     */
    @Test
    public void testGrantDataSource() {
        String datasourceIds = "100000,120000";
        User loginUser = new User();
        int userId = 3;
        // user not exist
        loginUser.setId(1);
        loginUser.setUserType(UserType.ADMIN_USER);
        when(userMapper.selectById(userId)).thenReturn(null);
        Map<String, Object> result = usersService.grantDataSource(loginUser, userId, datasourceIds);
        logger.info(result.toString());
        Assertions.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));
        // test admin user
        when(userMapper.selectById(userId)).thenReturn(getUser());
        when(datasourceUserMapper.deleteByUserId(Mockito.anyInt())).thenReturn(1);
        result = usersService.grantDataSource(loginUser, userId, datasourceIds);
        logger.info(result.toString());
        Assertions.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
        // test non-admin user
        loginUser.setId(2);
        loginUser.setUserType(UserType.GENERAL_USER);
        result = usersService.grantDataSource(loginUser, userId, datasourceIds);
        logger.info(result.toString());
        Assertions.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
    }

    // Fixture: admin login user with id=1.
    // NOTE(review): appears unused in this chunk — confirm against the full file before removing.
    private User getLoginUser() {
        User loginUser = new User();
        loginUser.setId(1);
        loginUser.setUserType(UserType.ADMIN_USER);
        return loginUser;
    }

    /**
     * getUserInfo: verifies the admin short-circuit path, then the general-user path that
     * loads details and alert groups from the mappers.
     */
    @Test
    public void getUserInfo() {
        User loginUser = new User();
        loginUser.setUserName("admin");
        loginUser.setUserType(UserType.ADMIN_USER);
        // get admin user
        Map<String, Object> result = usersService.getUserInfo(loginUser);
        logger.info(result.toString());
        Assertions.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
        User tempUser = (User) result.get(Constants.DATA_LIST);
        // check userName
        Assertions.assertEquals("admin", tempUser.getUserName());
        // get general user
        loginUser.setUserType(null);
        loginUser.setId(1);
        when(userMapper.queryDetailsById(1)).thenReturn(getGeneralUser());
        when(alertGroupMapper.queryByUserId(1)).thenReturn(getAlertGroups());
        result = usersService.getUserInfo(loginUser);
        logger.info(result.toString());
        Assertions.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
        tempUser = (User) result.get(Constants.DATA_LIST);
        // check userName
        Assertions.assertEquals("userTest0001", tempUser.getUserName());
    }

    /**
     * queryAllGeneralUsers: permission-denied then success with a non-empty user list.
     */
    @Test
    public void testQueryAllGeneralUsers() {
        User loginUser = new User();
        // no operate
        Map<String, Object> result = usersService.queryAllGeneralUsers(loginUser);
        logger.info(result.toString());
        Assertions.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
        // success
        loginUser.setUserType(UserType.ADMIN_USER);
        when(userMapper.queryAllGeneralUser()).thenReturn(getUserList());
        result = usersService.queryAllGeneralUsers(loginUser);
        logger.info(result.toString());
        Assertions.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
        List<User> userList = (List<User>) result.get(Constants.DATA_LIST);
        Assertions.assertTrue(CollectionUtils.isNotEmpty(userList));
    }

    /**
     * verifyUserName: a free name returns SUCCESS, an existing name returns USER_NAME_EXIST.
     */
    @Test
    public void testVerifyUserName() {
        // not exist user
        Result result = usersService.verifyUserName("admin89899");
        logger.info(result.toString());
        Assertions.assertEquals(Status.SUCCESS.getMsg(), result.getMsg());
        // exist user
        when(userMapper.queryByUserNameAccurately("userTest0001")).thenReturn(getUser());
        result = usersService.verifyUserName("userTest0001");
        logger.info(result.toString());
        Assertions.assertEquals(Status.USER_NAME_EXIST.getMsg(), result.getMsg());
    }

    /**
     * unauthorizedUser: permission-denied for an untyped user, then success as admin.
     */
    @Test
    public void testUnauthorizedUser() {
        User loginUser = new User();
        when(userMapper.selectList(null)).thenReturn(getUserList());
        when(userMapper.queryUserListByAlertGroupId(2)).thenReturn(getUserList());
        // no operate
        Map<String, Object> result = usersService.unauthorizedUser(loginUser, 2);
        logger.info(result.toString());
        // NOTE(review): ADMIN_USER is set before the first assertion, which reads oddly; the call
        // above was made while loginUser had no type, so the assertion below is still valid.
        loginUser.setUserType(UserType.ADMIN_USER);
        Assertions.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
        // success
        result = usersService.unauthorizedUser(loginUser, 2);
        logger.info(result.toString());
        Assertions.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
    }

    /**
     * authorizedUser: permission-denied for an untyped user, then success as admin with a
     * non-empty authorized-user list.
     */
    @Test
    public void testAuthorizedUser() {
        User loginUser = new User();
        when(userMapper.queryUserListByAlertGroupId(2)).thenReturn(getUserList());
        // no operate
        Map<String, Object> result = usersService.authorizedUser(loginUser, 2);
        logger.info(result.toString());
        Assertions.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
        // success
        loginUser.setUserType(UserType.ADMIN_USER);
        result = usersService.authorizedUser(loginUser, 2);
        Assertions.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
        List<User> userList = (List<User>) result.get(Constants.DATA_LIST);
        logger.info(result.toString());
        Assertions.assertTrue(CollectionUtils.isNotEmpty(userList));
    }

    /**
     * registerUser: rejects bad user name, bad password length, bad email, mismatched repeat
     * password, then succeeds with all-valid input.
     */
    @Test
    public void testRegisterUser() {
        String userName = "userTest0002~";
        String userPassword = "userTest";
        String repeatPassword = "userTest";
        String email = "123@qq.com";
        try {
            // userName error
            Map<String, Object> result = usersService.registerUser(userName, userPassword, repeatPassword, email);
            Assertions.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
            userName = "userTest0002";
            userPassword = "userTest000111111111111111";
            // password error
            result = usersService.registerUser(userName, userPassword, repeatPassword, email);
            Assertions.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
            userPassword = "userTest0002";
            email = "1q.com";
            // email error
            result = usersService.registerUser(userName, userPassword, repeatPassword, email);
            Assertions.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
            // repeatPassword error
            email = "7400@qq.com";
            repeatPassword = "userPassword";
            result = usersService.registerUser(userName, userPassword, repeatPassword, email);
            Assertions.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
            // success
            repeatPassword = "userTest0002";
            result = usersService.registerUser(userName, userPassword, repeatPassword, email);
            Assertions.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
        } catch (Exception e) {
            // NOTE(review): prefer Assertions.fail(e) so the swallowed cause is reported
            Assertions.assertTrue(false);
        }
    }

    /**
     * activateUser: not-admin, bad user name, user-not-exist, wrong state, then success on a
     * disabled user.
     */
    @Test
    public void testActivateUser() {
        User user = new User();
        user.setUserType(UserType.GENERAL_USER);
        String userName = "userTest0002~";
        try {
            // not admin
            Map<String, Object> result = usersService.activateUser(user, userName);
            Assertions.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
            // userName error
            user.setUserType(UserType.ADMIN_USER);
            result = usersService.activateUser(user, userName);
            Assertions.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
            // user not exist
            userName = "userTest10013";
            result = usersService.activateUser(user, userName);
            Assertions.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));
            // user state error
            userName = "userTest0001";
            when(userMapper.queryByUserNameAccurately(userName)).thenReturn(getUser());
            result = usersService.activateUser(user, userName);
            Assertions.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
            // success
            when(userMapper.queryByUserNameAccurately(userName)).thenReturn(getDisabledUser());
            result = usersService.activateUser(user, userName);
            Assertions.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
        } catch (Exception e) {
            // NOTE(review): prefer Assertions.fail(e) so the swallowed cause is reported
            Assertions.assertTrue(false);
        }
    }

    /**
     * batchActivateUser: not-admin path, then a batch of four names of which exactly one
     * (the disabled userTest0002) activates; the other three fail (already active, invalid
     * name, unknown user).
     */
    @Test
    public void testBatchActivateUser() {
        User user = new User();
        user.setUserType(UserType.GENERAL_USER);
        List<String> userNames = new ArrayList<>();
        userNames.add("userTest0001");
        userNames.add("userTest0002");
        userNames.add("userTest0003~");
        userNames.add("userTest0004");
        try {
            // not admin
            Map<String, Object> result = usersService.batchActivateUser(user, userNames);
            Assertions.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
            // batch activate user names
            user.setUserType(UserType.ADMIN_USER);
            when(userMapper.queryByUserNameAccurately("userTest0001")).thenReturn(getUser());
            when(userMapper.queryByUserNameAccurately("userTest0002")).thenReturn(getDisabledUser());
            result = usersService.batchActivateUser(user, userNames);
            Map<String, Object> responseData = (Map<String, Object>) result.get(Constants.DATA_LIST);
            Map<String, Object> successData = (Map<String, Object>) responseData.get("success");
            int totalSuccess = (Integer) successData.get("sum");
            Map<String, Object> failedData = (Map<String, Object>) responseData.get("failed");
            int totalFailed = (Integer) failedData.get("sum");
            Assertions.assertEquals(1, totalSuccess);
            Assertions.assertEquals(3, totalFailed);
            Assertions.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
        } catch (Exception e) {
            // NOTE(review): prefer Assertions.fail(e) so the swallowed cause is reported
            Assertions.assertTrue(false);
        }
    }

    /**
     * createUserIfNotExists: returns the existing user when the name is taken, otherwise
     * creates and returns a new one.
     */
    @Test
    public void testCreateUserIfNotExists() {
        User user;
        String userName = "userTest0001";
        String userPassword = "userTest";
        String email = "abc@x.com";
        String phone = "17366666666";
        String tenantCode = "tenantCode";
        int stat = 1;
        // User exists
        when(userMapper.existUser(userName)).thenReturn(true);
        when(userMapper.selectById(getUser().getId())).thenReturn(getUser());
        when(userMapper.queryDetailsById(getUser().getId())).thenReturn(getUser());
        when(userMapper.queryByUserNameAccurately(userName)).thenReturn(getUser());
        when(userMapper.updateById(any())).thenReturn(1);
        when(tenantMapper.queryByTenantCode(tenantCode)).thenReturn(getTenant());
        // queueName is presumably a field of this test class defined above this chunk — TODO confirm
        user = usersService.createUserIfNotExists(userName, userPassword, email, phone, tenantCode, queueName, stat);
        Assertions.assertEquals(getUser(), user);
        // User not exists
        Mockito.when(userMapper.existUser(userName)).thenReturn(false);
        Mockito.when(tenantMapper.queryByTenantCode(tenantCode)).thenReturn(getTenant());
        user = usersService.createUserIfNotExists(userName, userPassword, email, phone, tenantCode, queueName, stat);
        Assertions.assertNotNull(user);
    }

    /**
     * get disabled user (state=0) fixture
     */
    private User getDisabledUser() {
        User user = new User();
        user.setUserType(UserType.GENERAL_USER);
        user.setUserName("userTest0001");
        user.setUserPassword("userTest0001");
        user.setState(0);
        return user;
    }

    /**
     * Get project
     *
     * @return fixed test project (id=1, code=1, owned by user 1, perm=7)
     */
    private Project getProject() {
        Project project = new Project();
        project.setId(1);
        project.setCode(1L);
        project.setUserId(1);
        project.setName("PJ-001");
        project.setPerm(7);
        project.setDefCount(0);
        return project;
    }

    /**
     * get general (non-admin) user fixture
     */
    private User getGeneralUser() {
        User user = new User();
        user.setUserType(UserType.GENERAL_USER);
        user.setUserName("userTest0001");
        user.setUserPassword("userTest0001");
        return user;
    }

    // Single-element list wrapping the general-user fixture.
    private List<User> getUserList() {
        List<User> userList = new ArrayList<>();
        userList.add(getGeneralUser());
        return userList;
    }

    /**
     * get admin user fixture (id=0, state=1)
     */
    private User getUser() {
        User user = new User();
        user.setId(0);
        user.setUserType(UserType.ADMIN_USER);
        user.setUserName("userTest0001");
        user.setUserPassword("userTest0001");
        user.setState(1);
        return user;
    }

    /**
     * get non-admin user
     *
     * @return user fixture (id=2, tenant 2, queue "queue")
     */
    private User getNonAdminUser() {
        User user = new User();
        user.setId(2);
        user.setUserType(UserType.GENERAL_USER);
        user.setUserName("userTest0001");
        user.setUserPassword("userTest0001");
        user.setTenantId(2);
        user.setQueue("queue");
        return user;
    }

    /**
     * get tenant
     *
     * @return tenant fixture (id=1)
     */
    private Tenant getTenant() {
        Tenant tenant = new Tenant();
        tenant.setId(1);
        return tenant;
    }

    // Single default alert group, used by the getUserInfo general-user path.
    private List<AlertGroup> getAlertGroups() {
        List<AlertGroup> alertGroups = new ArrayList<>();
        AlertGroup alertGroup = new AlertGroup();
        alertGroups.add(alertGroup);
        return alertGroups;
    }
}
apache/incubator-kie-drools
36,423
kie-dmn/kie-dmn-validation/src/main/java/org/kie/dmn/validation/dtanalysis/mcdc/dmntck/TestCases.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ // Copied under Apache License from https://github.com/dmn-tck/tck/blob/8c23dc13caa508a33d11b47cca318d7c3a3ca2fc/LICENSE-ASL-2.0.txt package org.kie.dmn.validation.dtanalysis.mcdc.dmntck; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.Map; import org.kie.dmn.feel.util.Generated; import jakarta.xml.bind.JAXBElement; import jakarta.xml.bind.annotation.XmlAccessType; import jakarta.xml.bind.annotation.XmlAccessorType; import jakarta.xml.bind.annotation.XmlAnyAttribute; import jakarta.xml.bind.annotation.XmlAnyElement; import jakarta.xml.bind.annotation.XmlAttribute; import jakarta.xml.bind.annotation.XmlElement; import jakarta.xml.bind.annotation.XmlRootElement; import jakarta.xml.bind.annotation.XmlSchemaType; import jakarta.xml.bind.annotation.XmlType; import javax.xml.namespace.QName; import org.w3c.dom.Element; /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. 
* * <pre> * &lt;complexType&gt; * &lt;complexContent&gt; * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"&gt; * &lt;sequence&gt; * &lt;element name="modelName" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/&gt; * &lt;element name="labels" minOccurs="0"&gt; * &lt;complexType&gt; * &lt;complexContent&gt; * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"&gt; * &lt;sequence&gt; * &lt;element name="label" type="{http://www.w3.org/2001/XMLSchema}string" maxOccurs="unbounded" minOccurs="0"/&gt; * &lt;/sequence&gt; * &lt;/restriction&gt; * &lt;/complexContent&gt; * &lt;/complexType&gt; * &lt;/element&gt; * &lt;element name="testCase" maxOccurs="unbounded"&gt; * &lt;complexType&gt; * &lt;complexContent&gt; * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"&gt; * &lt;sequence&gt; * &lt;element name="description" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/&gt; * &lt;element name="inputNode" maxOccurs="unbounded" minOccurs="0"&gt; * &lt;complexType&gt; * &lt;complexContent&gt; * &lt;extension base="{http://www.omg.org/spec/DMN/20160719/testcase}valueType"&gt; * &lt;attribute name="name" use="required" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" /&gt; * &lt;anyAttribute processContents='lax' namespace='##other'/&gt; * &lt;/extension&gt; * &lt;/complexContent&gt; * &lt;/complexType&gt; * &lt;/element&gt; * &lt;element name="resultNode" maxOccurs="unbounded" minOccurs="0"&gt; * &lt;complexType&gt; * &lt;complexContent&gt; * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"&gt; * &lt;sequence&gt; * &lt;element name="computed" type="{http://www.omg.org/spec/DMN/20160719/testcase}valueType" minOccurs="0"/&gt; * &lt;element name="expected" type="{http://www.omg.org/spec/DMN/20160719/testcase}valueType" minOccurs="0"/&gt; * &lt;/sequence&gt; * &lt;attribute name="errorResult" type="{http://www.w3.org/2001/XMLSchema}boolean" default="false" /&gt; * &lt;attribute name="name" 
use="required" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" /&gt; * &lt;attribute name="type" type="{http://www.w3.org/2001/XMLSchema}string" /&gt; * &lt;attribute name="cast" type="{http://www.w3.org/2001/XMLSchema}string" /&gt; * &lt;/restriction&gt; * &lt;/complexContent&gt; * &lt;/complexType&gt; * &lt;/element&gt; * &lt;element name="extensionElements" minOccurs="0"&gt; * &lt;complexType&gt; * &lt;complexContent&gt; * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"&gt; * &lt;sequence&gt; * &lt;any processContents='lax' namespace='##other' maxOccurs="unbounded" minOccurs="0"/&gt; * &lt;/sequence&gt; * &lt;/restriction&gt; * &lt;/complexContent&gt; * &lt;/complexType&gt; * &lt;/element&gt; * &lt;/sequence&gt; * &lt;attribute name="id" type="{http://www.w3.org/2001/XMLSchema}string" /&gt; * &lt;attribute name="type" type="{http://www.omg.org/spec/DMN/20160719/testcase}testCaseType" default="decision" /&gt; * &lt;attribute name="invocableName" type="{http://www.w3.org/2001/XMLSchema}string" /&gt; * &lt;attribute name="name" type="{http://www.w3.org/2001/XMLSchema}string" /&gt; * &lt;anyAttribute processContents='lax' namespace='##other'/&gt; * &lt;/restriction&gt; * &lt;/complexContent&gt; * &lt;/complexType&gt; * &lt;/element&gt; * &lt;/sequence&gt; * &lt;/restriction&gt; * &lt;/complexContent&gt; * &lt;/complexType&gt; * </pre> * * */ @Generated("com.sun.tools.xjc.Driver") @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "modelName", "labels", "testCase" }) @XmlRootElement(name = "testCases") public class TestCases { protected String modelName; protected TestCases.Labels labels; @XmlElement(required = true) protected java.util.List<TestCases.TestCase> testCase; /** * Gets the value of the modelName property. * * @return * possible object is * {@link String } * */ public String getModelName() { return modelName; } /** * Sets the value of the modelName property. 
* * @param value * allowed object is * {@link String } * */ public void setModelName(String value) { this.modelName = value; } /** * Gets the value of the labels property. * * @return * possible object is * {@link TestCases.Labels } * */ public TestCases.Labels getLabels() { return labels; } /** * Sets the value of the labels property. * * @param value * allowed object is * {@link TestCases.Labels } * */ public void setLabels(TestCases.Labels value) { this.labels = value; } /** * Gets the value of the testCase property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the testCase property. * * <p> * For example, to add a new item, do as follows: * <pre> * getTestCase().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link TestCases.TestCase } * * */ public java.util.List<TestCases.TestCase> getTestCase() { if (testCase == null) { testCase = new ArrayList<>(); } return this.testCase; } public TestCases withModelName(String value) { setModelName(value); return this; } public TestCases withLabels(TestCases.Labels value) { setLabels(value); return this; } public TestCases withTestCase(TestCases.TestCase... values) { if (values!= null) { for (TestCases.TestCase value: values) { getTestCase().add(value); } } return this; } public TestCases withTestCase(Collection<TestCases.TestCase> values) { if (values!= null) { getTestCase().addAll(values); } return this; } /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. 
* * <pre> * &lt;complexType&gt; * &lt;complexContent&gt; * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"&gt; * &lt;sequence&gt; * &lt;element name="label" type="{http://www.w3.org/2001/XMLSchema}string" maxOccurs="unbounded" minOccurs="0"/&gt; * &lt;/sequence&gt; * &lt;/restriction&gt; * &lt;/complexContent&gt; * &lt;/complexType&gt; * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "label" }) public static class Labels { protected java.util.List<String> label; /** * Gets the value of the label property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the label property. * * <p> * For example, to add a new item, do as follows: * <pre> * getLabel().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link String } * * */ public java.util.List<String> getLabel() { if (label == null) { label = new ArrayList<>(); } return this.label; } public TestCases.Labels withLabel(String... values) { if (values!= null) { for (String value: values) { getLabel().add(value); } } return this; } public TestCases.Labels withLabel(Collection<String> values) { if (values!= null) { getLabel().addAll(values); } return this; } } /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. 
* * <pre> * &lt;complexType&gt; * &lt;complexContent&gt; * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"&gt; * &lt;sequence&gt; * &lt;element name="description" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/&gt; * &lt;element name="inputNode" maxOccurs="unbounded" minOccurs="0"&gt; * &lt;complexType&gt; * &lt;complexContent&gt; * &lt;extension base="{http://www.omg.org/spec/DMN/20160719/testcase}valueType"&gt; * &lt;attribute name="name" use="required" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" /&gt; * &lt;anyAttribute processContents='lax' namespace='##other'/&gt; * &lt;/extension&gt; * &lt;/complexContent&gt; * &lt;/complexType&gt; * &lt;/element&gt; * &lt;element name="resultNode" maxOccurs="unbounded" minOccurs="0"&gt; * &lt;complexType&gt; * &lt;complexContent&gt; * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"&gt; * &lt;sequence&gt; * &lt;element name="computed" type="{http://www.omg.org/spec/DMN/20160719/testcase}valueType" minOccurs="0"/&gt; * &lt;element name="expected" type="{http://www.omg.org/spec/DMN/20160719/testcase}valueType" minOccurs="0"/&gt; * &lt;/sequence&gt; * &lt;attribute name="errorResult" type="{http://www.w3.org/2001/XMLSchema}boolean" default="false" /&gt; * &lt;attribute name="name" use="required" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" /&gt; * &lt;attribute name="type" type="{http://www.w3.org/2001/XMLSchema}string" /&gt; * &lt;attribute name="cast" type="{http://www.w3.org/2001/XMLSchema}string" /&gt; * &lt;/restriction&gt; * &lt;/complexContent&gt; * &lt;/complexType&gt; * &lt;/element&gt; * &lt;element name="extensionElements" minOccurs="0"&gt; * &lt;complexType&gt; * &lt;complexContent&gt; * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"&gt; * &lt;sequence&gt; * &lt;any processContents='lax' namespace='##other' maxOccurs="unbounded" minOccurs="0"/&gt; * &lt;/sequence&gt; * &lt;/restriction&gt; * &lt;/complexContent&gt; * 
&lt;/complexType&gt; * &lt;/element&gt; * &lt;/sequence&gt; * &lt;attribute name="id" type="{http://www.w3.org/2001/XMLSchema}string" /&gt; * &lt;attribute name="type" type="{http://www.omg.org/spec/DMN/20160719/testcase}testCaseType" default="decision" /&gt; * &lt;attribute name="invocableName" type="{http://www.w3.org/2001/XMLSchema}string" /&gt; * &lt;attribute name="name" type="{http://www.w3.org/2001/XMLSchema}string" /&gt; * &lt;anyAttribute processContents='lax' namespace='##other'/&gt; * &lt;/restriction&gt; * &lt;/complexContent&gt; * &lt;/complexType&gt; * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "description", "inputNode", "resultNode", "extensionElements" }) public static class TestCase { protected String description; protected java.util.List<TestCases.TestCase.InputNode> inputNode; protected java.util.List<TestCases.TestCase.ResultNode> resultNode; protected TestCases.TestCase.ExtensionElements extensionElements; @XmlAttribute(name = "id") protected String id; @XmlAttribute(name = "type") protected TestCaseType type; @XmlAttribute(name = "invocableName") protected String invocableName; @XmlAttribute(name = "name") protected String name; @XmlAnyAttribute private Map<QName, String> otherAttributes = new HashMap<>(); /** * Gets the value of the description property. * * @return * possible object is * {@link String } * */ public String getDescription() { return description; } /** * Sets the value of the description property. * * @param value * allowed object is * {@link String } * */ public void setDescription(String value) { this.description = value; } /** * Gets the value of the inputNode property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the inputNode property. 
* * <p> * For example, to add a new item, do as follows: * <pre> * getInputNode().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link TestCases.TestCase.InputNode } * * */ public java.util.List<TestCases.TestCase.InputNode> getInputNode() { if (inputNode == null) { inputNode = new ArrayList<>(); } return this.inputNode; } /** * Gets the value of the resultNode property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the resultNode property. * * <p> * For example, to add a new item, do as follows: * <pre> * getResultNode().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link TestCases.TestCase.ResultNode } * * */ public java.util.List<TestCases.TestCase.ResultNode> getResultNode() { if (resultNode == null) { resultNode = new ArrayList<>(); } return this.resultNode; } /** * Gets the value of the extensionElements property. * * @return * possible object is * {@link TestCases.TestCase.ExtensionElements } * */ public TestCases.TestCase.ExtensionElements getExtensionElements() { return extensionElements; } /** * Sets the value of the extensionElements property. * * @param value * allowed object is * {@link TestCases.TestCase.ExtensionElements } * */ public void setExtensionElements(TestCases.TestCase.ExtensionElements value) { this.extensionElements = value; } /** * Gets the value of the id property. * * @return * possible object is * {@link String } * */ public String getId() { return id; } /** * Sets the value of the id property. * * @param value * allowed object is * {@link String } * */ public void setId(String value) { this.id = value; } /** * Gets the value of the type property. 
* * @return * possible object is * {@link TestCaseType } * */ public TestCaseType getType() { if (type == null) { return TestCaseType.DECISION; } else { return type; } } /** * Sets the value of the type property. * * @param value * allowed object is * {@link TestCaseType } * */ public void setType(TestCaseType value) { this.type = value; } /** * Gets the value of the invocableName property. * * @return * possible object is * {@link String } * */ public String getInvocableName() { return invocableName; } /** * Sets the value of the invocableName property. * * @param value * allowed object is * {@link String } * */ public void setInvocableName(String value) { this.invocableName = value; } /** * Gets the value of the name property. * * @return * possible object is * {@link String } * */ public String getName() { return name; } /** * Sets the value of the name property. * * @param value * allowed object is * {@link String } * */ public void setName(String value) { this.name = value; } /** * Gets a map that contains attributes that aren't bound to any typed property on this class. * * <p> * the map is keyed by the name of the attribute and * the value is the string value of the attribute. * * the map returned by this method is live, and you can add new attribute * by updating the map directly. Because of this design, there's no setter. * * * @return * always non-null */ public Map<QName, String> getOtherAttributes() { return otherAttributes; } public TestCases.TestCase withDescription(String value) { setDescription(value); return this; } public TestCases.TestCase withInputNode(TestCases.TestCase.InputNode... values) { if (values!= null) { for (TestCases.TestCase.InputNode value: values) { getInputNode().add(value); } } return this; } public TestCases.TestCase withInputNode(Collection<TestCases.TestCase.InputNode> values) { if (values!= null) { getInputNode().addAll(values); } return this; } public TestCases.TestCase withResultNode(TestCases.TestCase.ResultNode... 
values) { if (values!= null) { for (TestCases.TestCase.ResultNode value: values) { getResultNode().add(value); } } return this; } public TestCases.TestCase withResultNode(Collection<TestCases.TestCase.ResultNode> values) { if (values!= null) { getResultNode().addAll(values); } return this; } public TestCases.TestCase withExtensionElements(TestCases.TestCase.ExtensionElements value) { setExtensionElements(value); return this; } public TestCases.TestCase withId(String value) { setId(value); return this; } public TestCases.TestCase withType(TestCaseType value) { setType(value); return this; } public TestCases.TestCase withInvocableName(String value) { setInvocableName(value); return this; } public TestCases.TestCase withName(String value) { setName(value); return this; } /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType&gt; * &lt;complexContent&gt; * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"&gt; * &lt;sequence&gt; * &lt;any processContents='lax' namespace='##other' maxOccurs="unbounded" minOccurs="0"/&gt; * &lt;/sequence&gt; * &lt;/restriction&gt; * &lt;/complexContent&gt; * &lt;/complexType&gt; * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "any" }) public static class ExtensionElements { @XmlAnyElement(lax = true) protected java.util.List<Object> any; /** * Gets the value of the any property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the any property. 
* * <p> * For example, to add a new item, do as follows: * <pre> * getAny().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link Object } * {@link Element } * * */ public java.util.List<Object> getAny() { if (any == null) { any = new ArrayList<>(); } return this.any; } public TestCases.TestCase.ExtensionElements withAny(Object... values) { if (values!= null) { for (Object value: values) { getAny().add(value); } } return this; } public TestCases.TestCase.ExtensionElements withAny(Collection<Object> values) { if (values!= null) { getAny().addAll(values); } return this; } } /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType&gt; * &lt;complexContent&gt; * &lt;extension base="{http://www.omg.org/spec/DMN/20160719/testcase}valueType"&gt; * &lt;attribute name="name" use="required" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" /&gt; * &lt;anyAttribute processContents='lax' namespace='##other'/&gt; * &lt;/extension&gt; * &lt;/complexContent&gt; * &lt;/complexType&gt; * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "") public static class InputNode extends ValueType { @XmlAttribute(name = "name", required = true) @XmlSchemaType(name = "anySimpleType") protected String name; /** * Gets the value of the name property. * * @return * possible object is * {@link String } * */ public String getName() { return name; } /** * Sets the value of the name property. * * @param value * allowed object is * {@link String } * */ public void setName(String value) { this.name = value; } public TestCases.TestCase.InputNode withName(String value) { setName(value); return this; } @Override public TestCases.TestCase.InputNode withValue(JAXBElement<Object> value) { setValue(value); return this; } @Override public TestCases.TestCase.InputNode withComponent(ValueType.Component... 
values) { if (values!= null) { for (ValueType.Component value: values) { getComponent().add(value); } } return this; } @Override public TestCases.TestCase.InputNode withComponent(Collection<ValueType.Component> values) { if (values!= null) { getComponent().addAll(values); } return this; } @Override public TestCases.TestCase.InputNode withList(JAXBElement<ValueType.List> value) { setList(value); return this; } @Override public TestCases.TestCase.InputNode withExtensionElements(ValueType.ExtensionElements value) { setExtensionElements(value); return this; } } /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType&gt; * &lt;complexContent&gt; * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"&gt; * &lt;sequence&gt; * &lt;element name="computed" type="{http://www.omg.org/spec/DMN/20160719/testcase}valueType" minOccurs="0"/&gt; * &lt;element name="expected" type="{http://www.omg.org/spec/DMN/20160719/testcase}valueType" minOccurs="0"/&gt; * &lt;/sequence&gt; * &lt;attribute name="errorResult" type="{http://www.w3.org/2001/XMLSchema}boolean" default="false" /&gt; * &lt;attribute name="name" use="required" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" /&gt; * &lt;attribute name="type" type="{http://www.w3.org/2001/XMLSchema}string" /&gt; * &lt;attribute name="cast" type="{http://www.w3.org/2001/XMLSchema}string" /&gt; * &lt;/restriction&gt; * &lt;/complexContent&gt; * &lt;/complexType&gt; * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "computed", "expected" }) public static class ResultNode { protected ValueType computed; protected ValueType expected; @XmlAttribute(name = "errorResult") protected Boolean errorResult; @XmlAttribute(name = "name", required = true) @XmlSchemaType(name = "anySimpleType") protected String name; @XmlAttribute(name = "type") protected String type; 
@XmlAttribute(name = "cast") protected String cast; /** * Gets the value of the computed property. * * @return * possible object is * {@link ValueType } * */ public ValueType getComputed() { return computed; } /** * Sets the value of the computed property. * * @param value * allowed object is * {@link ValueType } * */ public void setComputed(ValueType value) { this.computed = value; } /** * Gets the value of the expected property. * * @return * possible object is * {@link ValueType } * */ public ValueType getExpected() { return expected; } /** * Sets the value of the expected property. * * @param value * allowed object is * {@link ValueType } * */ public void setExpected(ValueType value) { this.expected = value; } /** * Gets the value of the errorResult property. * * @return * possible object is * {@link Boolean } * */ public boolean isErrorResult() { if (errorResult == null) { return false; } else { return errorResult; } } /** * Sets the value of the errorResult property. * * @param value * allowed object is * {@link Boolean } * */ public void setErrorResult(Boolean value) { this.errorResult = value; } /** * Gets the value of the name property. * * @return * possible object is * {@link String } * */ public String getName() { return name; } /** * Sets the value of the name property. * * @param value * allowed object is * {@link String } * */ public void setName(String value) { this.name = value; } /** * Gets the value of the type property. * * @return * possible object is * {@link String } * */ public String getType() { return type; } /** * Sets the value of the type property. * * @param value * allowed object is * {@link String } * */ public void setType(String value) { this.type = value; } /** * Gets the value of the cast property. * * @return * possible object is * {@link String } * */ public String getCast() { return cast; } /** * Sets the value of the cast property. 
* * @param value * allowed object is * {@link String } * */ public void setCast(String value) { this.cast = value; } public TestCases.TestCase.ResultNode withComputed(ValueType value) { setComputed(value); return this; } public TestCases.TestCase.ResultNode withExpected(ValueType value) { setExpected(value); return this; } public TestCases.TestCase.ResultNode withErrorResult(Boolean value) { setErrorResult(value); return this; } public TestCases.TestCase.ResultNode withName(String value) { setName(value); return this; } public TestCases.TestCase.ResultNode withType(String value) { setType(value); return this; } public TestCases.TestCase.ResultNode withCast(String value) { setCast(value); return this; } } } }
googleapis/google-cloud-java
36,630
java-assured-workloads/proto-google-cloud-assured-workloads-v1/src/main/java/com/google/cloud/assuredworkloads/v1/ListViolationsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/assuredworkloads/v1/assuredworkloads.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.assuredworkloads.v1; /** * * * <pre> * Response of ListViolations endpoint. * </pre> * * Protobuf type {@code google.cloud.assuredworkloads.v1.ListViolationsResponse} */ public final class ListViolationsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.assuredworkloads.v1.ListViolationsResponse) ListViolationsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListViolationsResponse.newBuilder() to construct. 
private ListViolationsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListViolationsResponse() { violations_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListViolationsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.assuredworkloads.v1.AssuredworkloadsProto .internal_static_google_cloud_assuredworkloads_v1_ListViolationsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.assuredworkloads.v1.AssuredworkloadsProto .internal_static_google_cloud_assuredworkloads_v1_ListViolationsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.assuredworkloads.v1.ListViolationsResponse.class, com.google.cloud.assuredworkloads.v1.ListViolationsResponse.Builder.class); } public static final int VIOLATIONS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.assuredworkloads.v1.Violation> violations_; /** * * * <pre> * List of Violations under a Workload. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1.Violation violations = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.assuredworkloads.v1.Violation> getViolationsList() { return violations_; } /** * * * <pre> * List of Violations under a Workload. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1.Violation violations = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.assuredworkloads.v1.ViolationOrBuilder> getViolationsOrBuilderList() { return violations_; } /** * * * <pre> * List of Violations under a Workload. 
* </pre> * * <code>repeated .google.cloud.assuredworkloads.v1.Violation violations = 1;</code> */ @java.lang.Override public int getViolationsCount() { return violations_.size(); } /** * * * <pre> * List of Violations under a Workload. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1.Violation violations = 1;</code> */ @java.lang.Override public com.google.cloud.assuredworkloads.v1.Violation getViolations(int index) { return violations_.get(index); } /** * * * <pre> * List of Violations under a Workload. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1.Violation violations = 1;</code> */ @java.lang.Override public com.google.cloud.assuredworkloads.v1.ViolationOrBuilder getViolationsOrBuilder(int index) { return violations_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * The next page token. Returns empty if reached the last page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * The next page token. Returns empty if reached the last page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < violations_.size(); i++) { output.writeMessage(1, violations_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < violations_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, violations_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.assuredworkloads.v1.ListViolationsResponse)) { return super.equals(obj); } com.google.cloud.assuredworkloads.v1.ListViolationsResponse other = (com.google.cloud.assuredworkloads.v1.ListViolationsResponse) obj; if (!getViolationsList().equals(other.getViolationsList())) return false; if 
(!getNextPageToken().equals(other.getNextPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getViolationsCount() > 0) { hash = (37 * hash) + VIOLATIONS_FIELD_NUMBER; hash = (53 * hash) + getViolationsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.assuredworkloads.v1.ListViolationsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.assuredworkloads.v1.ListViolationsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.assuredworkloads.v1.ListViolationsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.assuredworkloads.v1.ListViolationsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.assuredworkloads.v1.ListViolationsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.assuredworkloads.v1.ListViolationsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 
throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.assuredworkloads.v1.ListViolationsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.assuredworkloads.v1.ListViolationsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.assuredworkloads.v1.ListViolationsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.assuredworkloads.v1.ListViolationsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.assuredworkloads.v1.ListViolationsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.assuredworkloads.v1.ListViolationsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( 
com.google.cloud.assuredworkloads.v1.ListViolationsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response of ListViolations endpoint. * </pre> * * Protobuf type {@code google.cloud.assuredworkloads.v1.ListViolationsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.assuredworkloads.v1.ListViolationsResponse) com.google.cloud.assuredworkloads.v1.ListViolationsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.assuredworkloads.v1.AssuredworkloadsProto .internal_static_google_cloud_assuredworkloads_v1_ListViolationsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.assuredworkloads.v1.AssuredworkloadsProto .internal_static_google_cloud_assuredworkloads_v1_ListViolationsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.assuredworkloads.v1.ListViolationsResponse.class, com.google.cloud.assuredworkloads.v1.ListViolationsResponse.Builder.class); } // Construct using com.google.cloud.assuredworkloads.v1.ListViolationsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (violationsBuilder_ == null) { violations_ = java.util.Collections.emptyList(); } else { violations_ = null; violationsBuilder_.clear(); } 
bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.assuredworkloads.v1.AssuredworkloadsProto .internal_static_google_cloud_assuredworkloads_v1_ListViolationsResponse_descriptor; } @java.lang.Override public com.google.cloud.assuredworkloads.v1.ListViolationsResponse getDefaultInstanceForType() { return com.google.cloud.assuredworkloads.v1.ListViolationsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.assuredworkloads.v1.ListViolationsResponse build() { com.google.cloud.assuredworkloads.v1.ListViolationsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.assuredworkloads.v1.ListViolationsResponse buildPartial() { com.google.cloud.assuredworkloads.v1.ListViolationsResponse result = new com.google.cloud.assuredworkloads.v1.ListViolationsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.assuredworkloads.v1.ListViolationsResponse result) { if (violationsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { violations_ = java.util.Collections.unmodifiableList(violations_); bitField0_ = (bitField0_ & ~0x00000001); } result.violations_ = violations_; } else { result.violations_ = violationsBuilder_.build(); } } private void buildPartial0(com.google.cloud.assuredworkloads.v1.ListViolationsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return 
super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.assuredworkloads.v1.ListViolationsResponse) { return mergeFrom((com.google.cloud.assuredworkloads.v1.ListViolationsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.assuredworkloads.v1.ListViolationsResponse other) { if (other == com.google.cloud.assuredworkloads.v1.ListViolationsResponse.getDefaultInstance()) return this; if (violationsBuilder_ == null) { if (!other.violations_.isEmpty()) { if (violations_.isEmpty()) { violations_ = other.violations_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureViolationsIsMutable(); violations_.addAll(other.violations_); } onChanged(); } } else { if (!other.violations_.isEmpty()) { if (violationsBuilder_.isEmpty()) { violationsBuilder_.dispose(); violationsBuilder_ = null; violations_ = other.violations_; bitField0_ = (bitField0_ & ~0x00000001); violationsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getViolationsFieldBuilder() : null; } else { violationsBuilder_.addAllMessages(other.violations_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.assuredworkloads.v1.Violation m = input.readMessage( com.google.cloud.assuredworkloads.v1.Violation.parser(), extensionRegistry); if (violationsBuilder_ == null) { ensureViolationsIsMutable(); violations_.add(m); } else { violationsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.assuredworkloads.v1.Violation> violations_ = java.util.Collections.emptyList(); private void ensureViolationsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { violations_ = new java.util.ArrayList<com.google.cloud.assuredworkloads.v1.Violation>(violations_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.assuredworkloads.v1.Violation, com.google.cloud.assuredworkloads.v1.Violation.Builder, 
com.google.cloud.assuredworkloads.v1.ViolationOrBuilder> violationsBuilder_; /** * * * <pre> * List of Violations under a Workload. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1.Violation violations = 1;</code> */ public java.util.List<com.google.cloud.assuredworkloads.v1.Violation> getViolationsList() { if (violationsBuilder_ == null) { return java.util.Collections.unmodifiableList(violations_); } else { return violationsBuilder_.getMessageList(); } } /** * * * <pre> * List of Violations under a Workload. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1.Violation violations = 1;</code> */ public int getViolationsCount() { if (violationsBuilder_ == null) { return violations_.size(); } else { return violationsBuilder_.getCount(); } } /** * * * <pre> * List of Violations under a Workload. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1.Violation violations = 1;</code> */ public com.google.cloud.assuredworkloads.v1.Violation getViolations(int index) { if (violationsBuilder_ == null) { return violations_.get(index); } else { return violationsBuilder_.getMessage(index); } } /** * * * <pre> * List of Violations under a Workload. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1.Violation violations = 1;</code> */ public Builder setViolations(int index, com.google.cloud.assuredworkloads.v1.Violation value) { if (violationsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureViolationsIsMutable(); violations_.set(index, value); onChanged(); } else { violationsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * List of Violations under a Workload. 
* </pre> * * <code>repeated .google.cloud.assuredworkloads.v1.Violation violations = 1;</code> */ public Builder setViolations( int index, com.google.cloud.assuredworkloads.v1.Violation.Builder builderForValue) { if (violationsBuilder_ == null) { ensureViolationsIsMutable(); violations_.set(index, builderForValue.build()); onChanged(); } else { violationsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * List of Violations under a Workload. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1.Violation violations = 1;</code> */ public Builder addViolations(com.google.cloud.assuredworkloads.v1.Violation value) { if (violationsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureViolationsIsMutable(); violations_.add(value); onChanged(); } else { violationsBuilder_.addMessage(value); } return this; } /** * * * <pre> * List of Violations under a Workload. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1.Violation violations = 1;</code> */ public Builder addViolations(int index, com.google.cloud.assuredworkloads.v1.Violation value) { if (violationsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureViolationsIsMutable(); violations_.add(index, value); onChanged(); } else { violationsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * List of Violations under a Workload. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1.Violation violations = 1;</code> */ public Builder addViolations( com.google.cloud.assuredworkloads.v1.Violation.Builder builderForValue) { if (violationsBuilder_ == null) { ensureViolationsIsMutable(); violations_.add(builderForValue.build()); onChanged(); } else { violationsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * List of Violations under a Workload. 
* </pre> * * <code>repeated .google.cloud.assuredworkloads.v1.Violation violations = 1;</code> */ public Builder addViolations( int index, com.google.cloud.assuredworkloads.v1.Violation.Builder builderForValue) { if (violationsBuilder_ == null) { ensureViolationsIsMutable(); violations_.add(index, builderForValue.build()); onChanged(); } else { violationsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * List of Violations under a Workload. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1.Violation violations = 1;</code> */ public Builder addAllViolations( java.lang.Iterable<? extends com.google.cloud.assuredworkloads.v1.Violation> values) { if (violationsBuilder_ == null) { ensureViolationsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, violations_); onChanged(); } else { violationsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * List of Violations under a Workload. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1.Violation violations = 1;</code> */ public Builder clearViolations() { if (violationsBuilder_ == null) { violations_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { violationsBuilder_.clear(); } return this; } /** * * * <pre> * List of Violations under a Workload. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1.Violation violations = 1;</code> */ public Builder removeViolations(int index) { if (violationsBuilder_ == null) { ensureViolationsIsMutable(); violations_.remove(index); onChanged(); } else { violationsBuilder_.remove(index); } return this; } /** * * * <pre> * List of Violations under a Workload. 
* </pre> * * <code>repeated .google.cloud.assuredworkloads.v1.Violation violations = 1;</code> */ public com.google.cloud.assuredworkloads.v1.Violation.Builder getViolationsBuilder(int index) { return getViolationsFieldBuilder().getBuilder(index); } /** * * * <pre> * List of Violations under a Workload. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1.Violation violations = 1;</code> */ public com.google.cloud.assuredworkloads.v1.ViolationOrBuilder getViolationsOrBuilder( int index) { if (violationsBuilder_ == null) { return violations_.get(index); } else { return violationsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * List of Violations under a Workload. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1.Violation violations = 1;</code> */ public java.util.List<? extends com.google.cloud.assuredworkloads.v1.ViolationOrBuilder> getViolationsOrBuilderList() { if (violationsBuilder_ != null) { return violationsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(violations_); } } /** * * * <pre> * List of Violations under a Workload. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1.Violation violations = 1;</code> */ public com.google.cloud.assuredworkloads.v1.Violation.Builder addViolationsBuilder() { return getViolationsFieldBuilder() .addBuilder(com.google.cloud.assuredworkloads.v1.Violation.getDefaultInstance()); } /** * * * <pre> * List of Violations under a Workload. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1.Violation violations = 1;</code> */ public com.google.cloud.assuredworkloads.v1.Violation.Builder addViolationsBuilder(int index) { return getViolationsFieldBuilder() .addBuilder(index, com.google.cloud.assuredworkloads.v1.Violation.getDefaultInstance()); } /** * * * <pre> * List of Violations under a Workload. 
* </pre> * * <code>repeated .google.cloud.assuredworkloads.v1.Violation violations = 1;</code> */ public java.util.List<com.google.cloud.assuredworkloads.v1.Violation.Builder> getViolationsBuilderList() { return getViolationsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.assuredworkloads.v1.Violation, com.google.cloud.assuredworkloads.v1.Violation.Builder, com.google.cloud.assuredworkloads.v1.ViolationOrBuilder> getViolationsFieldBuilder() { if (violationsBuilder_ == null) { violationsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.assuredworkloads.v1.Violation, com.google.cloud.assuredworkloads.v1.Violation.Builder, com.google.cloud.assuredworkloads.v1.ViolationOrBuilder>( violations_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); violations_ = null; } return violationsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * The next page token. Returns empty if reached the last page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The next page token. Returns empty if reached the last page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The next page token. 
Returns empty if reached the last page. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The next page token. Returns empty if reached the last page. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * The next page token. Returns empty if reached the last page. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.assuredworkloads.v1.ListViolationsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.assuredworkloads.v1.ListViolationsResponse) private static final com.google.cloud.assuredworkloads.v1.ListViolationsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.assuredworkloads.v1.ListViolationsResponse(); } public static com.google.cloud.assuredworkloads.v1.ListViolationsResponse 
getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListViolationsResponse> PARSER = new com.google.protobuf.AbstractParser<ListViolationsResponse>() { @java.lang.Override public ListViolationsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListViolationsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListViolationsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.assuredworkloads.v1.ListViolationsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/jena
36,474
jena-core/src/test/java/org/apache/jena/ontology/impl/TestOntResource.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Package /////////////// package org.apache.jena.ontology.impl; // Imports /////////////// import junit.framework.TestSuite; import org.apache.jena.ontology.*; import org.apache.jena.rdf.model.Literal; import org.apache.jena.rdf.model.NodeIterator; import org.apache.jena.rdf.model.RDFNode; import org.apache.jena.rdf.model.Resource; import org.apache.jena.vocabulary.RDF; /** * <p> * Unit test cases for ontology resources * </p> */ @SuppressWarnings("removal") public class TestOntResource extends OntTestBase { // Constants ////////////////////////////////// // Static variables ////////////////////////////////// // Instance variables ////////////////////////////////// // Constructors ////////////////////////////////// static public TestSuite suite() { return new TestOntResource( "TestResource" ); } public TestOntResource( String name ) { super( name ); } // External signature methods ////////////////////////////////// // Internal implementation methods ////////////////////////////////// @Override public OntTestCase[] getTests() { return new OntTestCase[] { new OntTestCase( "OntResource.sameAs", true, false, false ) { @Override public void ontTest( OntModel m ) { Profile prof = m.getProfile(); 
OntResource a = m.getResource( NS + "a" ).as( OntResource.class ); OntResource b = m.getResource( NS + "b" ).as( OntResource.class ); OntResource c = m.getResource( NS + "c" ).as( OntResource.class ); a.addSameAs( b ); assertEquals( "Cardinality should be 1", 1, a.getCardinality( prof.SAME_AS() ) ); assertEquals( "a should be sameAs b", b, a.getSameAs() ); a.addSameAs( c ); assertEquals( "Cardinality should be 2", 2, a.getCardinality( prof.SAME_AS() ) ); iteratorTest( a.listSameAs(), new Object[] {b, c} ); assertTrue( "a should be the same as b", a.isSameAs( b ) ); assertTrue( "a should be the same as c", a.isSameAs( c ) ); a.setSameAs( b ); assertEquals( "Cardinality should be 1", 1, a.getCardinality( prof.SAME_AS() ) ); assertEquals( "a should be sameAs b", b, a.getSameAs() ); a.removeSameAs( c ); assertEquals( "Cardinality should be 1", 1, a.getCardinality( prof.SAME_AS() ) ); a.removeSameAs( b ); assertEquals( "Cardinality should be 0", 0, a.getCardinality( prof.SAME_AS() ) ); } }, new OntTestCase( "OntResource.differentFrom", true, true, false ) { @Override public void ontTest( OntModel m ) { Profile prof = m.getProfile(); OntResource a = m.getResource( NS + "a" ).as( OntResource.class ); OntResource b = m.getResource( NS + "b" ).as( OntResource.class ); OntResource c = m.getResource( NS + "c" ).as( OntResource.class ); a.addDifferentFrom( b ); assertEquals( "Cardinality should be 1", 1, a.getCardinality( prof.DIFFERENT_FROM() ) ); assertEquals( "a should be differentFrom b", b, a.getDifferentFrom() ); a.addDifferentFrom( c ); assertEquals( "Cardinality should be 2", 2, a.getCardinality( prof.DIFFERENT_FROM() ) ); iteratorTest( a.listDifferentFrom(), new Object[] {b, c} ); assertTrue( "a should be diff from b", a.isDifferentFrom( b ) ); assertTrue( "a should be diff from c", a.isDifferentFrom( c ) ); a.setDifferentFrom( b ); assertEquals( "Cardinality should be 1", 1, a.getCardinality( prof.DIFFERENT_FROM() ) ); assertEquals( "a should be differentFrom b", b, 
a.getDifferentFrom() ); a.removeDifferentFrom( c ); assertEquals( "Cardinality should be 1", 1, a.getCardinality( prof.DIFFERENT_FROM() ) ); a.removeDifferentFrom( b ); assertEquals( "Cardinality should be 0", 0, a.getCardinality( prof.DIFFERENT_FROM() ) ); } }, new OntTestCase( "OntResource.seeAlso", true, true, true ) { @Override public void ontTest( OntModel m ) { Profile prof = m.getProfile(); OntResource a = m.getResource( NS + "a" ).as( OntResource.class ); OntResource b = m.getResource( NS + "b" ).as( OntResource.class ); OntResource c = m.getResource( NS + "c" ).as( OntResource.class ); a.addSeeAlso( b ); assertEquals( "Cardinality should be 1", 1, a.getCardinality( prof.SEE_ALSO() ) ); assertEquals( "a should be seeAlso b", b, a.getSeeAlso() ); a.addSeeAlso( c ); assertEquals( "Cardinality should be 2", 2, a.getCardinality( prof.SEE_ALSO() ) ); iteratorTest( a.listSeeAlso(), new Object[] {b, c} ); assertTrue( "a should have seeAlso b", a.hasSeeAlso( b ) ); assertTrue( "a should have seeAlso c", a.hasSeeAlso( c ) ); a.setSeeAlso( b ); assertEquals( "Cardinality should be 1", 1, a.getCardinality( prof.SEE_ALSO() ) ); assertEquals( "a should be seeAlso b", b, a.getSeeAlso() ); a.removeSeeAlso( c ); assertEquals( "Cardinality should be 1", 1, a.getCardinality( prof.SEE_ALSO() ) ); a.removeSeeAlso( b ); assertEquals( "Cardinality should be 0", 0, a.getCardinality( prof.SEE_ALSO() ) ); } }, new OntTestCase( "OntResource.isDefinedBy", true, true, true ) { @Override public void ontTest( OntModel m ) { Profile prof = m.getProfile(); OntResource a = m.getResource( NS + "a" ).as( OntResource.class ); OntResource b = m.getResource( NS + "b" ).as( OntResource.class ); OntResource c = m.getResource( NS + "c" ).as( OntResource.class ); a.addIsDefinedBy( b ); assertEquals( "Cardinality should be 1", 1, a.getCardinality( prof.IS_DEFINED_BY() ) ); assertEquals( "a should be isDefinedBy b", b, a.getIsDefinedBy() ); a.addIsDefinedBy( c ); assertEquals( "Cardinality should be 
2", 2, a.getCardinality( prof.IS_DEFINED_BY() ) ); iteratorTest( a.listIsDefinedBy(), new Object[] {b, c} ); assertTrue( "a should be defined by b", a.isDefinedBy( b ) ); assertTrue( "a should be defined by c", a.isDefinedBy( c ) ); a.setIsDefinedBy( b ); assertEquals( "Cardinality should be 1", 1, a.getCardinality( prof.IS_DEFINED_BY() ) ); assertEquals( "a should be isDefinedBy b", b, a.getIsDefinedBy() ); a.removeDefinedBy( c ); assertEquals( "Cardinality should be 1", 1, a.getCardinality( prof.IS_DEFINED_BY() ) ); a.removeDefinedBy( b ); assertEquals( "Cardinality should be 0", 0, a.getCardinality( prof.IS_DEFINED_BY() ) ); } }, new OntTestCase( "OntResource.versionInfo", true, true, false ) { @Override public void ontTest( OntModel m ) { Profile prof = m.getProfile(); OntResource a = m.getResource( NS + "a" ).as( OntResource.class ); a.addVersionInfo( "some info" ); assertEquals( "Cardinality should be 1", 1, a.getCardinality( prof.VERSION_INFO() ) ); assertEquals( "a has wrong version info", "some info", a.getVersionInfo() ); a.addVersionInfo( "more info" ); assertEquals( "Cardinality should be 2", 2, a.getCardinality( prof.VERSION_INFO() ) ); iteratorTest( a.listVersionInfo(), new Object[] {"some info", "more info"} ); assertTrue( "a should have some info", a.hasVersionInfo( "some info" ) ); assertTrue( "a should have more info", a.hasVersionInfo( "more info" ) ); a.setVersionInfo( "new info" ); assertEquals( "Cardinality should be 1", 1, a.getCardinality( prof.VERSION_INFO() ) ); assertEquals( "a has wrong version info", "new info", a.getVersionInfo() ); a.removeVersionInfo( "old info" ); assertEquals( "Cardinality should be 1", 1, a.getCardinality( prof.VERSION_INFO() ) ); a.removeVersionInfo( "new info" ); assertEquals( "Cardinality should be 0", 0, a.getCardinality( prof.VERSION_INFO() ) ); } }, new OntTestCase( "OntResource.label.nolang", true, true, true ) { @Override public void ontTest( OntModel m ) { Profile prof = m.getProfile(); OntResource a = 
m.getResource( NS + "a" ).as( OntResource.class ); a.addLabel( "some info", null ); assertEquals( "Cardinality should be 1", 1, a.getCardinality( prof.LABEL() ) ); assertEquals( "a has wrong label", "some info", a.getLabel( null ) ); a.addLabel( "more info", null ); assertEquals( "Cardinality should be 2", 2, a.getCardinality( prof.LABEL() ) ); iteratorTest( a.listLabels( null ), new Object[] {m.createLiteral( "some info" ), m.createLiteral( "more info" )} ); assertTrue( "a should have label some info", a.hasLabel( "some info", null ) ); assertTrue( "a should have label more info", a.hasLabel( "more info", null ) ); a.setLabel( "new info", null ); assertEquals( "Cardinality should be 1", 1, a.getCardinality( prof.LABEL() ) ); assertEquals( "a has wrong label", "new info", a.getLabel( null ) ); a.removeLabel( "foo", null ); assertEquals( "Cardinality should be 1", 1, a.getCardinality( prof.LABEL() ) ); a.removeLabel( "new info", null ); assertEquals( "Cardinality should be 0", 0, a.getCardinality( prof.LABEL() ) ); } }, new OntTestCase( "OntResource.label.lang", true, true, true ) { @Override public void ontTest( OntModel m ) { OntResource a = m.getResource( NS + "a" ).as( OntResource.class ); a.addLabel( "good", "EN" ); assertEquals( "wrong label", "good", a.getLabel( null ) ); a.addLabel( "bon", "FR" ); assertEquals( "wrong label", "good", a.getLabel( "EN" ) ); assertEquals( "wrong label", null, a.getLabel( "EN-GB" ) ); // no literal with a specific enough language assertEquals( "wrong label", "bon", a.getLabel( "FR" ) ); assertTrue( "a should have label good", a.hasLabel( "good", "EN" ) ); assertTrue( "a should have label bon", a.hasLabel( "bon", "FR" ) ); assertTrue( "a should note have label good (DE)", !a.hasLabel( "good", "DE" ) ); a.addLabel( "spiffing", "EN-GB" ); a.addLabel( "duude", "EN-US" ); assertEquals( "wrong label", "spiffing", a.getLabel( "EN-GB" ) ); assertEquals( "wrong label", "duude", a.getLabel( "EN-US" ) ); assertEquals( "wrong label", null, 
a.getLabel( "DE" ) ); a.addLabel( "abcdef", "AB-CD" ); assertEquals( "wrong label", "abcdef", a.getLabel( "AB" ) ); assertEquals( "wrong label", null, a.getLabel( "AB-XY" ) ); a.removeLabel( "abcde", "AB-CD" ); assertEquals( "Cardinality should be 5", 5, a.getCardinality( a.getProfile().LABEL() ) ); a.removeLabel( "abcdef", "AB-CD" ); assertEquals( "Cardinality should be 4", 4, a.getCardinality( a.getProfile().LABEL() ) ); } }, new OntTestCase( "OntResource.comment.nolang", true, true, true ) { @Override public void ontTest( OntModel m ) { Profile prof = m.getProfile(); OntResource a = m.getResource( NS + "a" ).as( OntResource.class ); a.addComment( "some info", null ); assertEquals( "Cardinality should be 1", 1, a.getCardinality( prof.COMMENT() ) ); assertEquals( "a has wrong comment", "some info", a.getComment( null ) ); a.addComment( "more info", null ); assertEquals( "Cardinality should be 2", 2, a.getCardinality( prof.COMMENT() ) ); iteratorTest( a.listComments( null ), new Object[] {m.createLiteral( "some info" ), m.createLiteral( "more info" )} ); assertTrue( "a should have comment some info", a.hasComment( "some info", null ) ); assertTrue( "a should have comment more info", a.hasComment( "more info", null ) ); a.setComment( "new info", null ); assertEquals( "Cardinality should be 1", 1, a.getCardinality( prof.COMMENT() ) ); assertEquals( "a has wrong comment", "new info", a.getComment( null ) ); a.removeComment( "foo", null ); assertEquals( "Cardinality should be 1", 1, a.getCardinality( prof.COMMENT() ) ); a.removeComment( "new info", null ); assertEquals( "Cardinality should be 0", 0, a.getCardinality( prof.COMMENT() ) ); } }, new OntTestCase( "OntResource.comment.lang", true, true, true ) { @Override public void ontTest( OntModel m ) { OntResource a = m.getResource( NS + "a" ).as( OntResource.class ); a.addComment( "good", "EN" ); assertEquals( "wrong comment", "good", a.getComment( null ) ); a.addComment( "bon", "FR" ); assertEquals( "wrong comment", 
"good", a.getComment( "EN" ) ); assertEquals( "wrong comment", null, a.getComment( "EN-GB" ) ); // no literal with a specific enough language assertEquals( "wrong comment", "bon", a.getComment( "FR" ) ); assertTrue( "a should have label good", a.hasComment( "good", "EN" ) ); assertTrue( "a should have label bon", a.hasComment( "bon", "FR" ) ); assertTrue( "a should note have label good (DE)", !a.hasComment( "good", "DE" ) ); a.addComment( "spiffing", "EN-GB" ); a.addComment( "duude", "EN-US" ); assertEquals( "wrong comment", "spiffing", a.getComment( "EN-GB" ) ); assertEquals( "wrong comment", "duude", a.getComment( "EN-US" ) ); assertEquals( "wrong comment", null, a.getComment( "DE" ) ); a.addComment( "abcdef", "AB-CD" ); assertEquals( "wrong comment", "abcdef", a.getComment( "AB" ) ); assertEquals( "wrong comment", null, a.getComment( "AB-XY" ) ); a.removeComment( "abcde", "AB-CD" ); assertEquals( "Cardinality should be 5", 5, a.getCardinality( a.getProfile().COMMENT() ) ); a.removeComment( "abcdef", "AB-CD" ); assertEquals( "Cardinality should be 4", 4, a.getCardinality( a.getProfile().COMMENT() ) ); } }, new OntTestCase( "OntResource.type (no inference)", true, true, true ) { @Override public void ontTest( OntModel m ) { OntClass A = m.createClass( NS + "A" ); OntClass B = m.createClass( NS + "B" ); A.addSubClass( B ); OntResource a = m.getResource( NS + "a" ).as( OntResource.class ); assertEquals( "Cardinality of rdf:type is wrong", 0, a.getCardinality( RDF.type ) ); a.addRDFType( B ); assertEquals( "rdf:type of a is wrong", B, a.getRDFType() ); assertEquals( "rdf:type of a is wrong", B, a.getRDFType( false ) ); iteratorTest( a.listRDFTypes( false ), new Object[] {B} ); // only B since we're not using an inference model iteratorTest( a.listRDFTypes( true ), new Object[] {B} ); a.addRDFType( A ); iteratorTest( a.listRDFTypes( false ), new Object[] {A,B} ); iteratorTest( a.listRDFTypes( true ), new Object[] {B} ); assertTrue( "a should not be of class A direct", 
!a.hasRDFType( A, true )); assertTrue( "a should not be of class B direct", a.hasRDFType( B, true )); OntClass C = m.createClass( NS + "C" ); a.setRDFType( C ); assertTrue( "a should be of class C", a.hasRDFType( C, false )); assertTrue( "a should not be of class A", !a.hasRDFType( A, false )); assertTrue( "a should not be of class B", !a.hasRDFType( B, false )); a.removeRDFType( B ); assertEquals( "Cardinality should be 1", 1, a.getCardinality( RDF.type ) ); a.removeRDFType( C ); assertEquals( "Cardinality should be 0", 0, a.getCardinality( RDF.type ) ); } }, new OntTestCase( "OntResource.remove", true, true, true ) { @Override public void ontTest( OntModel m ) { OntClass A = m.createClass( NS + "A" ); OntClass B = m.createClass( NS + "B" ); OntClass C = m.createClass( NS + "C" ); OntClass D = m.createClass( NS + "D" ); OntClass E = m.createClass( NS + "E" ); A.addSubClass( B ); A.addSubClass( C ); C.addSubClass( D ); C.addSubClass( E ); assertTrue( "super-class of E", E.hasSuperClass( C, false ) ); iteratorTest( A.listSubClasses(), new Object[] {B,C} ); C.remove(); assertTrue( "super-class of D", !D.hasSuperClass( C, false ) ); assertTrue( "super-class of E", !E.hasSuperClass( C, false ) ); iteratorTest( A.listSubClasses(), new Object[] {B} ); } }, new OntTestCase( "OntResource.asClass", true, true, true ) { @Override public void ontTest( OntModel m ) { Resource r = m.createResource(); r.addProperty( RDF.type, m.getProfile().CLASS() ); OntResource or = r.as( OntResource.class ); assertFalse( "should not be annotation prop", or.isAnnotationProperty() ); assertFalse( "should not be all different", or.isAllDifferent() ); assertTrue( "should be class", or.isClass() ); assertFalse( "should not be property", or.isProperty() ); assertFalse( "should not be object property", or.isObjectProperty() ); assertFalse( "should not be datatype property", or.isDatatypeProperty() ); assertTrue( "should not be individual", owlFull() || !or.isIndividual() ); assertFalse( "should not 
be data range", or.isDataRange() ); assertFalse( "should not be ontology", or.isOntology() ); RDFNode n = or.asClass(); assertTrue( "Should be OntClass", n instanceof OntClass ); } }, new OntTestCase( "OntResource.asAnnotationProperty", true, true, false) { @Override public void ontTest( OntModel m ) { if (m.getProfile().ANNOTATION_PROPERTY() == null) { throw new ProfileException(null,null); } Resource r = m.createResource(); r.addProperty( RDF.type, m.getProfile().ANNOTATION_PROPERTY() ); OntResource or = r.as( OntResource.class ); assertTrue( "should be annotation prop", or.isAnnotationProperty() ); assertFalse( "should not be all different", or.isAllDifferent() ); assertFalse( "should not be class", or.isClass() ); assertTrue( "should be property", or.isProperty() ); assertFalse( "should not be object property", or.isObjectProperty() ); assertFalse( "should not be datatype property", or.isDatatypeProperty() ); assertFalse( "should not be individual", or.isIndividual() ); assertFalse( "should not be data range", or.isDataRange() ); assertFalse( "should not be ontology", or.isOntology() ); RDFNode n = or.asAnnotationProperty(); assertTrue( "Should be AnnotationProperty", n instanceof AnnotationProperty); } }, new OntTestCase( "OntResource.asObjectProperty", true, true, false) { @Override public void ontTest( OntModel m ) { if (m.getProfile().OBJECT_PROPERTY() == null) { throw new ProfileException(null,null); } Resource r = m.createResource(); r.addProperty( RDF.type, m.getProfile().OBJECT_PROPERTY() ); OntResource or = r.as( OntResource.class ); assertFalse( "should not be annotation prop", or.isAnnotationProperty() ); assertFalse( "should not be all different", or.isAllDifferent() ); assertFalse( "should not be class", or.isClass() ); assertTrue( "should be property", or.isProperty() ); assertTrue( "should be object property", or.isObjectProperty() ); assertFalse( "should not be datatype property", or.isDatatypeProperty() ); assertFalse( "should not be 
individual", or.isIndividual() ); assertFalse( "should not be data range", or.isDataRange() ); assertFalse( "should not be ontology", or.isOntology() ); RDFNode n = or.asObjectProperty(); assertTrue( "Should be ObjectProperty", n instanceof ObjectProperty); } }, new OntTestCase( "OntResource.asDatatypeProperty", true, true, false) { @Override public void ontTest( OntModel m ) { if (m.getProfile().DATATYPE_PROPERTY() == null) { throw new ProfileException(null,null); } Resource r = m.createResource(); r.addProperty( RDF.type, m.getProfile().DATATYPE_PROPERTY() ); OntResource or = r.as( OntResource.class ); assertFalse( "should not be annotation prop", or.isAnnotationProperty() ); assertFalse( "should not be all different", or.isAllDifferent() ); assertFalse( "should not be class", or.isClass() ); assertTrue( "should be property", or.isProperty() ); assertFalse( "should not be object property", or.isObjectProperty() ); assertTrue( "should be datatype property", or.isDatatypeProperty() ); assertFalse( "should not be individual", or.isIndividual() ); assertFalse( "should not be data range", or.isDataRange() ); assertFalse( "should not be ontology", or.isOntology() ); RDFNode n = or.asDatatypeProperty(); assertTrue( "Should be DatatypeProperty", n instanceof DatatypeProperty); } }, new OntTestCase( "OntResource.asAllDifferent", true, true, false) { @Override public void ontTest( OntModel m ) { if (m.getProfile().ALL_DIFFERENT() == null) { throw new ProfileException(null,null); } Resource r = m.createResource(); r.addProperty( RDF.type, m.getProfile().ALL_DIFFERENT() ); OntResource or = r.as( OntResource.class ); assertFalse( "should not be annotation prop", or.isAnnotationProperty() ); assertTrue( "should be all different", or.isAllDifferent() ); assertFalse( "should not be class", or.isClass() ); assertFalse( "should not be property", or.isProperty() ); assertFalse( "should not be object property", or.isObjectProperty() ); assertFalse( "should not be datatype property", 
or.isDatatypeProperty() ); assertFalse( "should not be individual", or.isIndividual() ); assertFalse( "should not be data range", or.isDataRange() ); assertFalse( "should not be ontology", or.isOntology() ); RDFNode n = or.asAllDifferent(); assertTrue( "Should be AnnotationProperty", n instanceof AllDifferent); } }, new OntTestCase( "OntResource.asProperty", true, true, true ) { @Override public void ontTest( OntModel m ) { Resource r = m.createResource(); r.addProperty( RDF.type, m.getProfile().PROPERTY() ); OntResource or = r.as( OntResource.class ); assertFalse( "should not be annotation prop", or.isAnnotationProperty() ); assertFalse( "should not be all different", or.isAllDifferent() ); assertFalse( "should not be class", or.isClass() ); assertTrue( "should be property", or.isProperty() ); assertFalse( "should not be object property", or.isObjectProperty() ); assertFalse( "should not be datatype property", or.isDatatypeProperty() ); assertFalse( "should not be individual", or.isIndividual() ); assertFalse( "should not be data range", or.isDataRange() ); assertFalse( "should not be ontology", or.isOntology() ); RDFNode n = or.asProperty(); assertTrue( "Should be OntProperty", n instanceof OntProperty); } }, new OntTestCase( "OntResource.asIndividual", true, true, true ) { @Override public void ontTest( OntModel m ) { Resource r = m.createResource(); Resource s = m.createResource(); s.addProperty( RDF.type, m.getProfile().CLASS() ); r.addProperty( RDF.type, s ); OntResource or = r.as( OntResource.class ); assertFalse( "should not be annotation prop", or.isAnnotationProperty() ); assertFalse( "should not be all different", or.isAllDifferent() ); assertFalse( "should not be class", or.isClass() ); assertFalse( "should not be property", or.isProperty() ); assertFalse( "should not be object property", or.isObjectProperty() ); assertFalse( "should not be datatype property", or.isDatatypeProperty() ); assertTrue( "should be individual", or.isIndividual() ); 
assertFalse( "should not be data range", or.isDataRange() ); assertFalse( "should not be ontology", or.isOntology() ); RDFNode n = or.asIndividual(); assertTrue( "Should be individual", n instanceof Individual); } }, new OntTestCase( "OntResource.asDataRange", true, false, false ) { @Override public void ontTest( OntModel m ) { if (m.getProfile().DATARANGE() == null) { throw new ProfileException(null,null); } Resource r = m.createResource(); r.addProperty( RDF.type, m.getProfile().DATARANGE() ); OntResource or = r.as( OntResource.class ); assertFalse( "should not be annotation prop", or.isAnnotationProperty() ); assertFalse( "should not be all different", or.isAllDifferent() ); assertFalse( "should not be class", or.isClass() ); assertFalse( "should not be property", or.isProperty() ); assertFalse( "should not be object property", or.isObjectProperty() ); assertFalse( "should not be datatype property", or.isDatatypeProperty() ); assertFalse( "should not be individual", or.isIndividual() ); assertTrue( "should be data range", or.isDataRange() ); assertFalse( "should not be ontology", or.isOntology() ); RDFNode n = or.asDataRange(); assertTrue( "Should be DataRange", n instanceof DataRange ); } }, new OntTestCase( "OntResource.asOntology", true, true, false ) { @Override public void ontTest( OntModel m ) { if (m.getProfile().ONTOLOGY() == null) { throw new ProfileException(null,null); } Resource r = m.createResource(); r.addProperty( RDF.type, m.getProfile().ONTOLOGY() ); OntResource or = r.as( OntResource.class ); assertFalse( "should not be annotation prop", or.isAnnotationProperty() ); assertFalse( "should not be all different", or.isAllDifferent() ); assertFalse( "should not be class", or.isClass() ); assertFalse( "should not be property", or.isProperty() ); assertFalse( "should not be object property", or.isObjectProperty() ); assertFalse( "should not be datatype property", or.isDatatypeProperty() ); assertFalse( "should not be individual", or.isIndividual() ); 
assertFalse( "should not be data range", or.isDataRange() ); assertTrue( "should be ontology", or.isOntology() ); RDFNode n = or.asOntology(); assertTrue( "Should be Ontology", n instanceof Ontology); } }, new OntTestCase( "OntResource.isLanguageTerm", true, true, true ) { @Override public void ontTest( OntModel m ) { // class is defined (differently) in every profile OntResource or = m.getProfile().CLASS().inModel(m).as( OntResource.class ); assertTrue( "should be a lang term", or.isOntLanguageTerm() ); or = m.createOntResource( "http://foo/bar" ); assertFalse( "should not be a lang term", or.isOntLanguageTerm() ); } }, new OntTestCase( "OntResource.getOntModel", true, true, true ) { @Override public void ontTest( OntModel m ) { OntResource or = m.createOntResource( "http://foo/bar" ); OntModel m0 = or.getOntModel(); assertEquals( m, m0 ); } }, new OntTestCase( "OntResource.getPropertyValue - object prop", true, true, true ) { @Override public void ontTest( OntModel m ) { OntResource a = m.createOntResource( "http://foo/bar#a" ); Resource b = m.createResource( "http://foo/bar#b" ); OntProperty p = m.createOntProperty( "http://foo/bar#p" ); m.add( a, p, b ); Object bb = a.getPropertyValue( p ); assertEquals( b, bb ); assertTrue( "Return value should be an OntResource", bb instanceof OntResource ); } }, new OntTestCase( "OntResource.getPropertyValue - missing prop", true, true, true ) { @Override public void ontTest( OntModel m ) { OntResource a = m.createOntResource( "http://foo/bar#a" ); Resource b = m.createResource( "http://foo/bar#b" ); OntProperty p = m.createOntProperty( "http://foo/bar#p" ); OntProperty q = m.createOntProperty( "http://foo/bar#q" ); m.add( a, p, b ); Object bb = a.getPropertyValue( q ); assertNull( bb ); } }, new OntTestCase( "OntResource.listPropertyValues - object prop", true, true, true ) { @Override public void ontTest( OntModel m ) { OntResource a = m.createOntResource( "http://foo/bar#a" ); Resource b = m.createResource( 
"http://foo/bar#b" ); OntProperty p = m.createOntProperty( "http://foo/bar#p" ); Literal l = m.createTypedLiteral( false ); m.add( a, p, b ); m.add( a, p, l ); NodeIterator ni = a.listPropertyValues( p ); while (ni.hasNext()) { RDFNode n = ni.nextNode(); if (n.isResource()) { assertEquals( b, n ); assertTrue( "Return value should be an OntResource", n instanceof OntResource ); } } } }, }; } //============================================================================== // Inner class definitions //============================================================================== }
googleapis/google-cloud-java
36,597
java-datacatalog/proto-google-cloud-datacatalog-v1beta1/src/main/java/com/google/cloud/datacatalog/v1beta1/CreateEntryRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/datacatalog/v1beta1/datacatalog.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.datacatalog.v1beta1; /** * * * <pre> * Request message for * [CreateEntry][google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntry]. * </pre> * * Protobuf type {@code google.cloud.datacatalog.v1beta1.CreateEntryRequest} */ public final class CreateEntryRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.datacatalog.v1beta1.CreateEntryRequest) CreateEntryRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateEntryRequest.newBuilder() to construct. 
private CreateEntryRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateEntryRequest() { parent_ = ""; entryId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CreateEntryRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.datacatalog.v1beta1.Datacatalog .internal_static_google_cloud_datacatalog_v1beta1_CreateEntryRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.datacatalog.v1beta1.Datacatalog .internal_static_google_cloud_datacatalog_v1beta1_CreateEntryRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.datacatalog.v1beta1.CreateEntryRequest.class, com.google.cloud.datacatalog.v1beta1.CreateEntryRequest.Builder.class); } private int bitField0_; public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The name of the entry group this entry is in. Example: * * * projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} * * Note that this Entry and its child resources may not actually be stored in * the location in this name. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The name of the entry group this entry is in. 
Example: * * * projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} * * Note that this Entry and its child resources may not actually be stored in * the location in this name. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int ENTRY_ID_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object entryId_ = ""; /** * * * <pre> * Required. The id of the entry to create. * </pre> * * <code>string entry_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The entryId. */ @java.lang.Override public java.lang.String getEntryId() { java.lang.Object ref = entryId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); entryId_ = s; return s; } } /** * * * <pre> * Required. The id of the entry to create. * </pre> * * <code>string entry_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for entryId. */ @java.lang.Override public com.google.protobuf.ByteString getEntryIdBytes() { java.lang.Object ref = entryId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); entryId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int ENTRY_FIELD_NUMBER = 2; private com.google.cloud.datacatalog.v1beta1.Entry entry_; /** * * * <pre> * Required. The entry to create. 
* </pre> * * <code> * .google.cloud.datacatalog.v1beta1.Entry entry = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the entry field is set. */ @java.lang.Override public boolean hasEntry() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The entry to create. * </pre> * * <code> * .google.cloud.datacatalog.v1beta1.Entry entry = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The entry. */ @java.lang.Override public com.google.cloud.datacatalog.v1beta1.Entry getEntry() { return entry_ == null ? com.google.cloud.datacatalog.v1beta1.Entry.getDefaultInstance() : entry_; } /** * * * <pre> * Required. The entry to create. * </pre> * * <code> * .google.cloud.datacatalog.v1beta1.Entry entry = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.datacatalog.v1beta1.EntryOrBuilder getEntryOrBuilder() { return entry_ == null ? com.google.cloud.datacatalog.v1beta1.Entry.getDefaultInstance() : entry_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getEntry()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(entryId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, entryId_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size 
+= com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getEntry()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(entryId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, entryId_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.datacatalog.v1beta1.CreateEntryRequest)) { return super.equals(obj); } com.google.cloud.datacatalog.v1beta1.CreateEntryRequest other = (com.google.cloud.datacatalog.v1beta1.CreateEntryRequest) obj; if (!getParent().equals(other.getParent())) return false; if (!getEntryId().equals(other.getEntryId())) return false; if (hasEntry() != other.hasEntry()) return false; if (hasEntry()) { if (!getEntry().equals(other.getEntry())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + ENTRY_ID_FIELD_NUMBER; hash = (53 * hash) + getEntryId().hashCode(); if (hasEntry()) { hash = (37 * hash) + ENTRY_FIELD_NUMBER; hash = (53 * hash) + getEntry().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.datacatalog.v1beta1.CreateEntryRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datacatalog.v1beta1.CreateEntryRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datacatalog.v1beta1.CreateEntryRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datacatalog.v1beta1.CreateEntryRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datacatalog.v1beta1.CreateEntryRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datacatalog.v1beta1.CreateEntryRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datacatalog.v1beta1.CreateEntryRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.datacatalog.v1beta1.CreateEntryRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.datacatalog.v1beta1.CreateEntryRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.datacatalog.v1beta1.CreateEntryRequest parseDelimitedFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.datacatalog.v1beta1.CreateEntryRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.datacatalog.v1beta1.CreateEntryRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.datacatalog.v1beta1.CreateEntryRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for * [CreateEntry][google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntry]. 
* </pre> * * Protobuf type {@code google.cloud.datacatalog.v1beta1.CreateEntryRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.datacatalog.v1beta1.CreateEntryRequest) com.google.cloud.datacatalog.v1beta1.CreateEntryRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.datacatalog.v1beta1.Datacatalog .internal_static_google_cloud_datacatalog_v1beta1_CreateEntryRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.datacatalog.v1beta1.Datacatalog .internal_static_google_cloud_datacatalog_v1beta1_CreateEntryRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.datacatalog.v1beta1.CreateEntryRequest.class, com.google.cloud.datacatalog.v1beta1.CreateEntryRequest.Builder.class); } // Construct using com.google.cloud.datacatalog.v1beta1.CreateEntryRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getEntryFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; entryId_ = ""; entry_ = null; if (entryBuilder_ != null) { entryBuilder_.dispose(); entryBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.datacatalog.v1beta1.Datacatalog .internal_static_google_cloud_datacatalog_v1beta1_CreateEntryRequest_descriptor; } @java.lang.Override public com.google.cloud.datacatalog.v1beta1.CreateEntryRequest 
getDefaultInstanceForType() { return com.google.cloud.datacatalog.v1beta1.CreateEntryRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.datacatalog.v1beta1.CreateEntryRequest build() { com.google.cloud.datacatalog.v1beta1.CreateEntryRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.datacatalog.v1beta1.CreateEntryRequest buildPartial() { com.google.cloud.datacatalog.v1beta1.CreateEntryRequest result = new com.google.cloud.datacatalog.v1beta1.CreateEntryRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.datacatalog.v1beta1.CreateEntryRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.entryId_ = entryId_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000004) != 0)) { result.entry_ = entryBuilder_ == null ? 
entry_ : entryBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.datacatalog.v1beta1.CreateEntryRequest) { return mergeFrom((com.google.cloud.datacatalog.v1beta1.CreateEntryRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.datacatalog.v1beta1.CreateEntryRequest other) { if (other == com.google.cloud.datacatalog.v1beta1.CreateEntryRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getEntryId().isEmpty()) { entryId_ = other.entryId_; bitField0_ |= 0x00000002; onChanged(); } if (other.hasEntry()) { mergeEntry(other.getEntry()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getEntryFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000004; break; } // case 18 case 26: { entryId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The name of the entry group this entry is in. Example: * * * projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} * * Note that this Entry and its child resources may not actually be stored in * the location in this name. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The name of the entry group this entry is in. Example: * * * projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} * * Note that this Entry and its child resources may not actually be stored in * the location in this name. 
* </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The name of the entry group this entry is in. Example: * * * projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} * * Note that this Entry and its child resources may not actually be stored in * the location in this name. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The name of the entry group this entry is in. Example: * * * projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} * * Note that this Entry and its child resources may not actually be stored in * the location in this name. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The name of the entry group this entry is in. Example: * * * projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} * * Note that this Entry and its child resources may not actually be stored in * the location in this name. 
* </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object entryId_ = ""; /** * * * <pre> * Required. The id of the entry to create. * </pre> * * <code>string entry_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The entryId. */ public java.lang.String getEntryId() { java.lang.Object ref = entryId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); entryId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The id of the entry to create. * </pre> * * <code>string entry_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for entryId. */ public com.google.protobuf.ByteString getEntryIdBytes() { java.lang.Object ref = entryId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); entryId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The id of the entry to create. * </pre> * * <code>string entry_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The entryId to set. * @return This builder for chaining. */ public Builder setEntryId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } entryId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The id of the entry to create. 
* </pre> * * <code>string entry_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearEntryId() { entryId_ = getDefaultInstance().getEntryId(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Required. The id of the entry to create. * </pre> * * <code>string entry_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for entryId to set. * @return This builder for chaining. */ public Builder setEntryIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); entryId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private com.google.cloud.datacatalog.v1beta1.Entry entry_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.datacatalog.v1beta1.Entry, com.google.cloud.datacatalog.v1beta1.Entry.Builder, com.google.cloud.datacatalog.v1beta1.EntryOrBuilder> entryBuilder_; /** * * * <pre> * Required. The entry to create. * </pre> * * <code> * .google.cloud.datacatalog.v1beta1.Entry entry = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the entry field is set. */ public boolean hasEntry() { return ((bitField0_ & 0x00000004) != 0); } /** * * * <pre> * Required. The entry to create. * </pre> * * <code> * .google.cloud.datacatalog.v1beta1.Entry entry = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The entry. */ public com.google.cloud.datacatalog.v1beta1.Entry getEntry() { if (entryBuilder_ == null) { return entry_ == null ? com.google.cloud.datacatalog.v1beta1.Entry.getDefaultInstance() : entry_; } else { return entryBuilder_.getMessage(); } } /** * * * <pre> * Required. The entry to create. 
* </pre> * * <code> * .google.cloud.datacatalog.v1beta1.Entry entry = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setEntry(com.google.cloud.datacatalog.v1beta1.Entry value) { if (entryBuilder_ == null) { if (value == null) { throw new NullPointerException(); } entry_ = value; } else { entryBuilder_.setMessage(value); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. The entry to create. * </pre> * * <code> * .google.cloud.datacatalog.v1beta1.Entry entry = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setEntry(com.google.cloud.datacatalog.v1beta1.Entry.Builder builderForValue) { if (entryBuilder_ == null) { entry_ = builderForValue.build(); } else { entryBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. The entry to create. * </pre> * * <code> * .google.cloud.datacatalog.v1beta1.Entry entry = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeEntry(com.google.cloud.datacatalog.v1beta1.Entry value) { if (entryBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0) && entry_ != null && entry_ != com.google.cloud.datacatalog.v1beta1.Entry.getDefaultInstance()) { getEntryBuilder().mergeFrom(value); } else { entry_ = value; } } else { entryBuilder_.mergeFrom(value); } if (entry_ != null) { bitField0_ |= 0x00000004; onChanged(); } return this; } /** * * * <pre> * Required. The entry to create. * </pre> * * <code> * .google.cloud.datacatalog.v1beta1.Entry entry = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearEntry() { bitField0_ = (bitField0_ & ~0x00000004); entry_ = null; if (entryBuilder_ != null) { entryBuilder_.dispose(); entryBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The entry to create. 
* </pre> * * <code> * .google.cloud.datacatalog.v1beta1.Entry entry = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.datacatalog.v1beta1.Entry.Builder getEntryBuilder() { bitField0_ |= 0x00000004; onChanged(); return getEntryFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The entry to create. * </pre> * * <code> * .google.cloud.datacatalog.v1beta1.Entry entry = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.datacatalog.v1beta1.EntryOrBuilder getEntryOrBuilder() { if (entryBuilder_ != null) { return entryBuilder_.getMessageOrBuilder(); } else { return entry_ == null ? com.google.cloud.datacatalog.v1beta1.Entry.getDefaultInstance() : entry_; } } /** * * * <pre> * Required. The entry to create. * </pre> * * <code> * .google.cloud.datacatalog.v1beta1.Entry entry = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.datacatalog.v1beta1.Entry, com.google.cloud.datacatalog.v1beta1.Entry.Builder, com.google.cloud.datacatalog.v1beta1.EntryOrBuilder> getEntryFieldBuilder() { if (entryBuilder_ == null) { entryBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.datacatalog.v1beta1.Entry, com.google.cloud.datacatalog.v1beta1.Entry.Builder, com.google.cloud.datacatalog.v1beta1.EntryOrBuilder>( getEntry(), getParentForChildren(), isClean()); entry_ = null; } return entryBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.datacatalog.v1beta1.CreateEntryRequest) } // @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.CreateEntryRequest) private 
static final com.google.cloud.datacatalog.v1beta1.CreateEntryRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.datacatalog.v1beta1.CreateEntryRequest(); } public static com.google.cloud.datacatalog.v1beta1.CreateEntryRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CreateEntryRequest> PARSER = new com.google.protobuf.AbstractParser<CreateEntryRequest>() { @java.lang.Override public CreateEntryRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CreateEntryRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateEntryRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.datacatalog.v1beta1.CreateEntryRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,673
java-life-sciences/proto-google-cloud-life-sciences-v2beta/src/main/java/com/google/cloud/lifesciences/v2beta/ContainerStartedEvent.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/lifesciences/v2beta/workflows.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.lifesciences.v2beta; /** * * * <pre> * An event generated when a container starts. * </pre> * * Protobuf type {@code google.cloud.lifesciences.v2beta.ContainerStartedEvent} */ public final class ContainerStartedEvent extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.lifesciences.v2beta.ContainerStartedEvent) ContainerStartedEventOrBuilder { private static final long serialVersionUID = 0L; // Use ContainerStartedEvent.newBuilder() to construct. 
private ContainerStartedEvent(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ContainerStartedEvent() { ipAddress_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ContainerStartedEvent(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.lifesciences.v2beta.WorkflowsProto .internal_static_google_cloud_lifesciences_v2beta_ContainerStartedEvent_descriptor; } @SuppressWarnings({"rawtypes"}) @java.lang.Override protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection( int number) { switch (number) { case 2: return internalGetPortMappings(); default: throw new RuntimeException("Invalid map field number: " + number); } } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.lifesciences.v2beta.WorkflowsProto .internal_static_google_cloud_lifesciences_v2beta_ContainerStartedEvent_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.lifesciences.v2beta.ContainerStartedEvent.class, com.google.cloud.lifesciences.v2beta.ContainerStartedEvent.Builder.class); } public static final int ACTION_ID_FIELD_NUMBER = 1; private int actionId_ = 0; /** * * * <pre> * The numeric ID of the action that started this container. * </pre> * * <code>int32 action_id = 1;</code> * * @return The actionId. 
*/ @java.lang.Override public int getActionId() { return actionId_; } public static final int PORT_MAPPINGS_FIELD_NUMBER = 2; private static final class PortMappingsDefaultEntryHolder { static final com.google.protobuf.MapEntry<java.lang.Integer, java.lang.Integer> defaultEntry = com.google.protobuf.MapEntry.<java.lang.Integer, java.lang.Integer>newDefaultInstance( com.google.cloud.lifesciences.v2beta.WorkflowsProto .internal_static_google_cloud_lifesciences_v2beta_ContainerStartedEvent_PortMappingsEntry_descriptor, com.google.protobuf.WireFormat.FieldType.INT32, 0, com.google.protobuf.WireFormat.FieldType.INT32, 0); } @SuppressWarnings("serial") private com.google.protobuf.MapField<java.lang.Integer, java.lang.Integer> portMappings_; private com.google.protobuf.MapField<java.lang.Integer, java.lang.Integer> internalGetPortMappings() { if (portMappings_ == null) { return com.google.protobuf.MapField.emptyMapField( PortMappingsDefaultEntryHolder.defaultEntry); } return portMappings_; } public int getPortMappingsCount() { return internalGetPortMappings().getMap().size(); } /** * * * <pre> * The container-to-host port mappings installed for this container. This * set will contain any ports exposed using the `PUBLISH_EXPOSED_PORTS` flag * as well as any specified in the `Action` definition. * </pre> * * <code>map&lt;int32, int32&gt; port_mappings = 2;</code> */ @java.lang.Override public boolean containsPortMappings(int key) { return internalGetPortMappings().getMap().containsKey(key); } /** Use {@link #getPortMappingsMap()} instead. */ @java.lang.Override @java.lang.Deprecated public java.util.Map<java.lang.Integer, java.lang.Integer> getPortMappings() { return getPortMappingsMap(); } /** * * * <pre> * The container-to-host port mappings installed for this container. This * set will contain any ports exposed using the `PUBLISH_EXPOSED_PORTS` flag * as well as any specified in the `Action` definition. 
* </pre> * * <code>map&lt;int32, int32&gt; port_mappings = 2;</code> */ @java.lang.Override public java.util.Map<java.lang.Integer, java.lang.Integer> getPortMappingsMap() { return internalGetPortMappings().getMap(); } /** * * * <pre> * The container-to-host port mappings installed for this container. This * set will contain any ports exposed using the `PUBLISH_EXPOSED_PORTS` flag * as well as any specified in the `Action` definition. * </pre> * * <code>map&lt;int32, int32&gt; port_mappings = 2;</code> */ @java.lang.Override public int getPortMappingsOrDefault(int key, int defaultValue) { java.util.Map<java.lang.Integer, java.lang.Integer> map = internalGetPortMappings().getMap(); return map.containsKey(key) ? map.get(key) : defaultValue; } /** * * * <pre> * The container-to-host port mappings installed for this container. This * set will contain any ports exposed using the `PUBLISH_EXPOSED_PORTS` flag * as well as any specified in the `Action` definition. * </pre> * * <code>map&lt;int32, int32&gt; port_mappings = 2;</code> */ @java.lang.Override public int getPortMappingsOrThrow(int key) { java.util.Map<java.lang.Integer, java.lang.Integer> map = internalGetPortMappings().getMap(); if (!map.containsKey(key)) { throw new java.lang.IllegalArgumentException(); } return map.get(key); } public static final int IP_ADDRESS_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object ipAddress_ = ""; /** * * * <pre> * The public IP address that can be used to connect to the container. This * field is only populated when at least one port mapping is present. If the * instance was created with a private address, this field will be empty even * if port mappings exist. * </pre> * * <code>string ip_address = 3;</code> * * @return The ipAddress. 
*/ @java.lang.Override public java.lang.String getIpAddress() { java.lang.Object ref = ipAddress_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); ipAddress_ = s; return s; } } /** * * * <pre> * The public IP address that can be used to connect to the container. This * field is only populated when at least one port mapping is present. If the * instance was created with a private address, this field will be empty even * if port mappings exist. * </pre> * * <code>string ip_address = 3;</code> * * @return The bytes for ipAddress. */ @java.lang.Override public com.google.protobuf.ByteString getIpAddressBytes() { java.lang.Object ref = ipAddress_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); ipAddress_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (actionId_ != 0) { output.writeInt32(1, actionId_); } com.google.protobuf.GeneratedMessageV3.serializeIntegerMapTo( output, internalGetPortMappings(), PortMappingsDefaultEntryHolder.defaultEntry, 2); if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(ipAddress_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, ipAddress_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (actionId_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(1, 
actionId_); } for (java.util.Map.Entry<java.lang.Integer, java.lang.Integer> entry : internalGetPortMappings().getMap().entrySet()) { com.google.protobuf.MapEntry<java.lang.Integer, java.lang.Integer> portMappings__ = PortMappingsDefaultEntryHolder.defaultEntry .newBuilderForType() .setKey(entry.getKey()) .setValue(entry.getValue()) .build(); size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, portMappings__); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(ipAddress_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, ipAddress_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.lifesciences.v2beta.ContainerStartedEvent)) { return super.equals(obj); } com.google.cloud.lifesciences.v2beta.ContainerStartedEvent other = (com.google.cloud.lifesciences.v2beta.ContainerStartedEvent) obj; if (getActionId() != other.getActionId()) return false; if (!internalGetPortMappings().equals(other.internalGetPortMappings())) return false; if (!getIpAddress().equals(other.getIpAddress())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + ACTION_ID_FIELD_NUMBER; hash = (53 * hash) + getActionId(); if (!internalGetPortMappings().getMap().isEmpty()) { hash = (37 * hash) + PORT_MAPPINGS_FIELD_NUMBER; hash = (53 * hash) + internalGetPortMappings().hashCode(); } hash = (37 * hash) + IP_ADDRESS_FIELD_NUMBER; hash = (53 * hash) + getIpAddress().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.lifesciences.v2beta.ContainerStartedEvent parseFrom( 
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.lifesciences.v2beta.ContainerStartedEvent parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.lifesciences.v2beta.ContainerStartedEvent parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.lifesciences.v2beta.ContainerStartedEvent parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.lifesciences.v2beta.ContainerStartedEvent parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.lifesciences.v2beta.ContainerStartedEvent parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.lifesciences.v2beta.ContainerStartedEvent parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.lifesciences.v2beta.ContainerStartedEvent parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.lifesciences.v2beta.ContainerStartedEvent parseDelimitedFrom( 
java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.lifesciences.v2beta.ContainerStartedEvent parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.lifesciences.v2beta.ContainerStartedEvent parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.lifesciences.v2beta.ContainerStartedEvent parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.lifesciences.v2beta.ContainerStartedEvent prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * An event generated when a container starts. 
* </pre> * * Protobuf type {@code google.cloud.lifesciences.v2beta.ContainerStartedEvent} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.lifesciences.v2beta.ContainerStartedEvent) com.google.cloud.lifesciences.v2beta.ContainerStartedEventOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.lifesciences.v2beta.WorkflowsProto .internal_static_google_cloud_lifesciences_v2beta_ContainerStartedEvent_descriptor; } @SuppressWarnings({"rawtypes"}) protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection( int number) { switch (number) { case 2: return internalGetPortMappings(); default: throw new RuntimeException("Invalid map field number: " + number); } } @SuppressWarnings({"rawtypes"}) protected com.google.protobuf.MapFieldReflectionAccessor internalGetMutableMapFieldReflection( int number) { switch (number) { case 2: return internalGetMutablePortMappings(); default: throw new RuntimeException("Invalid map field number: " + number); } } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.lifesciences.v2beta.WorkflowsProto .internal_static_google_cloud_lifesciences_v2beta_ContainerStartedEvent_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.lifesciences.v2beta.ContainerStartedEvent.class, com.google.cloud.lifesciences.v2beta.ContainerStartedEvent.Builder.class); } // Construct using com.google.cloud.lifesciences.v2beta.ContainerStartedEvent.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; actionId_ = 0; internalGetMutablePortMappings().clear(); ipAddress_ = ""; return this; } 
@java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.lifesciences.v2beta.WorkflowsProto .internal_static_google_cloud_lifesciences_v2beta_ContainerStartedEvent_descriptor; } @java.lang.Override public com.google.cloud.lifesciences.v2beta.ContainerStartedEvent getDefaultInstanceForType() { return com.google.cloud.lifesciences.v2beta.ContainerStartedEvent.getDefaultInstance(); } @java.lang.Override public com.google.cloud.lifesciences.v2beta.ContainerStartedEvent build() { com.google.cloud.lifesciences.v2beta.ContainerStartedEvent result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.lifesciences.v2beta.ContainerStartedEvent buildPartial() { com.google.cloud.lifesciences.v2beta.ContainerStartedEvent result = new com.google.cloud.lifesciences.v2beta.ContainerStartedEvent(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.lifesciences.v2beta.ContainerStartedEvent result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.actionId_ = actionId_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.portMappings_ = internalGetPortMappings(); result.portMappings_.makeImmutable(); } if (((from_bitField0_ & 0x00000004) != 0)) { result.ipAddress_ = ipAddress_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public 
Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.lifesciences.v2beta.ContainerStartedEvent) { return mergeFrom((com.google.cloud.lifesciences.v2beta.ContainerStartedEvent) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.lifesciences.v2beta.ContainerStartedEvent other) { if (other == com.google.cloud.lifesciences.v2beta.ContainerStartedEvent.getDefaultInstance()) return this; if (other.getActionId() != 0) { setActionId(other.getActionId()); } internalGetMutablePortMappings().mergeFrom(other.internalGetPortMappings()); bitField0_ |= 0x00000002; if (!other.getIpAddress().isEmpty()) { ipAddress_ = other.ipAddress_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { actionId_ = input.readInt32(); bitField0_ |= 0x00000001; break; } // case 8 case 18: { com.google.protobuf.MapEntry<java.lang.Integer, java.lang.Integer> portMappings__ = input.readMessage( PortMappingsDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry); internalGetMutablePortMappings() .getMutableMap() 
.put(portMappings__.getKey(), portMappings__.getValue()); bitField0_ |= 0x00000002; break; } // case 18 case 26: { ipAddress_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private int actionId_; /** * * * <pre> * The numeric ID of the action that started this container. * </pre> * * <code>int32 action_id = 1;</code> * * @return The actionId. */ @java.lang.Override public int getActionId() { return actionId_; } /** * * * <pre> * The numeric ID of the action that started this container. * </pre> * * <code>int32 action_id = 1;</code> * * @param value The actionId to set. * @return This builder for chaining. */ public Builder setActionId(int value) { actionId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The numeric ID of the action that started this container. * </pre> * * <code>int32 action_id = 1;</code> * * @return This builder for chaining. 
*/ public Builder clearActionId() { bitField0_ = (bitField0_ & ~0x00000001); actionId_ = 0; onChanged(); return this; } private com.google.protobuf.MapField<java.lang.Integer, java.lang.Integer> portMappings_; private com.google.protobuf.MapField<java.lang.Integer, java.lang.Integer> internalGetPortMappings() { if (portMappings_ == null) { return com.google.protobuf.MapField.emptyMapField( PortMappingsDefaultEntryHolder.defaultEntry); } return portMappings_; } private com.google.protobuf.MapField<java.lang.Integer, java.lang.Integer> internalGetMutablePortMappings() { if (portMappings_ == null) { portMappings_ = com.google.protobuf.MapField.newMapField(PortMappingsDefaultEntryHolder.defaultEntry); } if (!portMappings_.isMutable()) { portMappings_ = portMappings_.copy(); } bitField0_ |= 0x00000002; onChanged(); return portMappings_; } public int getPortMappingsCount() { return internalGetPortMappings().getMap().size(); } /** * * * <pre> * The container-to-host port mappings installed for this container. This * set will contain any ports exposed using the `PUBLISH_EXPOSED_PORTS` flag * as well as any specified in the `Action` definition. * </pre> * * <code>map&lt;int32, int32&gt; port_mappings = 2;</code> */ @java.lang.Override public boolean containsPortMappings(int key) { return internalGetPortMappings().getMap().containsKey(key); } /** Use {@link #getPortMappingsMap()} instead. */ @java.lang.Override @java.lang.Deprecated public java.util.Map<java.lang.Integer, java.lang.Integer> getPortMappings() { return getPortMappingsMap(); } /** * * * <pre> * The container-to-host port mappings installed for this container. This * set will contain any ports exposed using the `PUBLISH_EXPOSED_PORTS` flag * as well as any specified in the `Action` definition. 
* </pre> * * <code>map&lt;int32, int32&gt; port_mappings = 2;</code> */ @java.lang.Override public java.util.Map<java.lang.Integer, java.lang.Integer> getPortMappingsMap() { return internalGetPortMappings().getMap(); } /** * * * <pre> * The container-to-host port mappings installed for this container. This * set will contain any ports exposed using the `PUBLISH_EXPOSED_PORTS` flag * as well as any specified in the `Action` definition. * </pre> * * <code>map&lt;int32, int32&gt; port_mappings = 2;</code> */ @java.lang.Override public int getPortMappingsOrDefault(int key, int defaultValue) { java.util.Map<java.lang.Integer, java.lang.Integer> map = internalGetPortMappings().getMap(); return map.containsKey(key) ? map.get(key) : defaultValue; } /** * * * <pre> * The container-to-host port mappings installed for this container. This * set will contain any ports exposed using the `PUBLISH_EXPOSED_PORTS` flag * as well as any specified in the `Action` definition. * </pre> * * <code>map&lt;int32, int32&gt; port_mappings = 2;</code> */ @java.lang.Override public int getPortMappingsOrThrow(int key) { java.util.Map<java.lang.Integer, java.lang.Integer> map = internalGetPortMappings().getMap(); if (!map.containsKey(key)) { throw new java.lang.IllegalArgumentException(); } return map.get(key); } public Builder clearPortMappings() { bitField0_ = (bitField0_ & ~0x00000002); internalGetMutablePortMappings().getMutableMap().clear(); return this; } /** * * * <pre> * The container-to-host port mappings installed for this container. This * set will contain any ports exposed using the `PUBLISH_EXPOSED_PORTS` flag * as well as any specified in the `Action` definition. * </pre> * * <code>map&lt;int32, int32&gt; port_mappings = 2;</code> */ public Builder removePortMappings(int key) { internalGetMutablePortMappings().getMutableMap().remove(key); return this; } /** Use alternate mutation accessors instead. 
*/ @java.lang.Deprecated public java.util.Map<java.lang.Integer, java.lang.Integer> getMutablePortMappings() { bitField0_ |= 0x00000002; return internalGetMutablePortMappings().getMutableMap(); } /** * * * <pre> * The container-to-host port mappings installed for this container. This * set will contain any ports exposed using the `PUBLISH_EXPOSED_PORTS` flag * as well as any specified in the `Action` definition. * </pre> * * <code>map&lt;int32, int32&gt; port_mappings = 2;</code> */ public Builder putPortMappings(int key, int value) { internalGetMutablePortMappings().getMutableMap().put(key, value); bitField0_ |= 0x00000002; return this; } /** * * * <pre> * The container-to-host port mappings installed for this container. This * set will contain any ports exposed using the `PUBLISH_EXPOSED_PORTS` flag * as well as any specified in the `Action` definition. * </pre> * * <code>map&lt;int32, int32&gt; port_mappings = 2;</code> */ public Builder putAllPortMappings(java.util.Map<java.lang.Integer, java.lang.Integer> values) { internalGetMutablePortMappings().getMutableMap().putAll(values); bitField0_ |= 0x00000002; return this; } private java.lang.Object ipAddress_ = ""; /** * * * <pre> * The public IP address that can be used to connect to the container. This * field is only populated when at least one port mapping is present. If the * instance was created with a private address, this field will be empty even * if port mappings exist. * </pre> * * <code>string ip_address = 3;</code> * * @return The ipAddress. */ public java.lang.String getIpAddress() { java.lang.Object ref = ipAddress_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); ipAddress_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The public IP address that can be used to connect to the container. This * field is only populated when at least one port mapping is present. 
If the * instance was created with a private address, this field will be empty even * if port mappings exist. * </pre> * * <code>string ip_address = 3;</code> * * @return The bytes for ipAddress. */ public com.google.protobuf.ByteString getIpAddressBytes() { java.lang.Object ref = ipAddress_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); ipAddress_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The public IP address that can be used to connect to the container. This * field is only populated when at least one port mapping is present. If the * instance was created with a private address, this field will be empty even * if port mappings exist. * </pre> * * <code>string ip_address = 3;</code> * * @param value The ipAddress to set. * @return This builder for chaining. */ public Builder setIpAddress(java.lang.String value) { if (value == null) { throw new NullPointerException(); } ipAddress_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * The public IP address that can be used to connect to the container. This * field is only populated when at least one port mapping is present. If the * instance was created with a private address, this field will be empty even * if port mappings exist. * </pre> * * <code>string ip_address = 3;</code> * * @return This builder for chaining. */ public Builder clearIpAddress() { ipAddress_ = getDefaultInstance().getIpAddress(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * The public IP address that can be used to connect to the container. This * field is only populated when at least one port mapping is present. If the * instance was created with a private address, this field will be empty even * if port mappings exist. * </pre> * * <code>string ip_address = 3;</code> * * @param value The bytes for ipAddress to set. 
* @return This builder for chaining. */ public Builder setIpAddressBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); ipAddress_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.lifesciences.v2beta.ContainerStartedEvent) } // @@protoc_insertion_point(class_scope:google.cloud.lifesciences.v2beta.ContainerStartedEvent) private static final com.google.cloud.lifesciences.v2beta.ContainerStartedEvent DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.lifesciences.v2beta.ContainerStartedEvent(); } public static com.google.cloud.lifesciences.v2beta.ContainerStartedEvent getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ContainerStartedEvent> PARSER = new com.google.protobuf.AbstractParser<ContainerStartedEvent>() { @java.lang.Override public ContainerStartedEvent parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); 
} return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ContainerStartedEvent> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ContainerStartedEvent> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.lifesciences.v2beta.ContainerStartedEvent getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/httpcomponents-core
36,861
httpcore5/src/test/java/org/apache/hc/core5/ssl/TestSSLContextBuilder.java
/* * ==================================================================== * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. 
* */ package org.apache.hc.core5.ssl; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.InetSocketAddress; import java.net.ServerSocket; import java.net.Socket; import java.net.URL; import java.security.KeyStore; import java.security.KeyStoreException; import java.security.NoSuchAlgorithmException; import java.security.NoSuchProviderException; import java.security.Principal; import java.security.Security; import java.security.UnrecoverableKeyException; import java.security.cert.X509Certificate; import java.util.Arrays; import java.util.LinkedHashSet; import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; import javax.net.ssl.KeyManagerFactory; import javax.net.ssl.SSLContext; import javax.net.ssl.SSLException; import javax.net.ssl.SSLParameters; import javax.net.ssl.SSLPeerUnverifiedException; import javax.net.ssl.SSLServerSocket; import javax.net.ssl.SSLSession; import javax.net.ssl.SSLSocket; import javax.net.ssl.TrustManagerFactory; import org.apache.hc.core5.util.Timeout; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; /** * Unit tests for {@link SSLContextBuilder}. 
*/
class TestSSLContextBuilder {

    // JSSE / JCE provider names shipped with Oracle/OpenJDK JVMs.
    // NOTE(review): assertions against PROVIDER_SUN_JSSE assume a Sun/OpenJDK
    // runtime; they would fail on a JVM with a different default provider — confirm.
    static final String PROVIDER_SUN_JSSE = "SunJSSE";
    static final String PROVIDER_SUN_JCE = "SunJCE";

    // Windows JSSE reports some handshake failures as plain IOException rather
    // than SSLException; the protocol-mismatch tests branch on this.
    private static boolean isWindows() {
        return System.getProperty("os.name").contains("Windows");
    }

    // Socket connect / read timeout used by all handshake tests.
    private static final Timeout TIMEOUT = Timeout.ofSeconds(5);

    // Single-thread executor hosting the in-test "server" side of each handshake.
    private ExecutorService executorService;

    // Shut the server-side executor down after each test so accepted sockets
    // and the accept loop do not leak into the next test.
    @AfterEach
    void cleanup() throws Exception {
        if (this.executorService != null) {
            this.executorService.shutdown();
            this.executorService.awaitTermination(5, TimeUnit.SECONDS);
        }
    }

    // Looks up a keystore fixture on the test classpath (e.g. "/test-server.p12").
    private URL getResource(final String name) {
        return getClass().getResource(name);
    }

    // Builder smoke test: explicitly passing all JVM defaults must yield a
    // usable TLS context from the requested provider.
    @Test
    void testBuildAllDefaults() throws Exception {
        final SSLContext sslContext = SSLContextBuilder.create()
                .setKeyStoreType(KeyStore.getDefaultType())
                .setKeyManagerFactoryAlgorithm(KeyManagerFactory.getDefaultAlgorithm())
                .setTrustManagerFactoryAlgorithm(TrustManagerFactory.getDefaultAlgorithm())
                .setProvider(PROVIDER_SUN_JSSE)
                .setProtocol("TLS")
                .setSecureRandom(null)
                .loadTrustMaterial((KeyStore) null, null)
                .loadKeyMaterial((KeyStore) null, null, null)
                .build();
        Assertions.assertNotNull(sslContext);
        Assertions.assertEquals("TLS", sslContext.getProtocol());
        Assertions.assertEquals(PROVIDER_SUN_JSSE, sslContext.getProvider().getName());
    }

    // Passing null for every option must fall back to the same defaults
    // ("TLS" protocol, default JSSE provider) rather than failing.
    @Test
    void testBuildAllNull() throws Exception {
        final SSLContext sslContext = SSLContextBuilder.create()
                .setKeyStoreType(null)
                .setKeyManagerFactoryAlgorithm(null)
                .setTrustManagerFactoryAlgorithm(null)
                .setProtocol(null)
                .setProvider((String) null)
                .setSecureRandom(null)
                .loadTrustMaterial((KeyStore) null, null)
                .loadKeyMaterial((KeyStore) null, null, null)
                .build();
        Assertions.assertNotNull(sslContext);
        Assertions.assertEquals("TLS", sslContext.getProtocol());
        Assertions.assertEquals(PROVIDER_SUN_JSSE, sslContext.getProvider().getName());
    }

    // Same null-tolerance check through the older (deprecated) configuration path.
    @Test
    void testBuildAllNull_deprecated() throws Exception {
        final SSLContext sslContext = SSLContextBuilder.create()
                .setProtocol(null)
                .setSecureRandom(null)
                .loadTrustMaterial((KeyStore) null, null)
                .loadKeyMaterial((KeyStore) null, null, null)
                .build();
        Assertions.assertNotNull(sslContext);
        Assertions.assertEquals("TLS", sslContext.getProtocol());
    }

    // A completely unconfigured builder must still produce a context.
    @Test
    void testBuildDefault() {
        Assertions.assertDoesNotThrow(() -> new SSLContextBuilder().build());
    }

    // An unknown KeyManagerFactory algorithm must surface as NoSuchAlgorithmException.
    @Test
    void testBuildNoSuchKeyManagerFactoryAlgorithm() {
        final URL resource1 = getResource("/test-keypasswd.p12");
        final String storePassword = "nopassword";
        final String keyPassword = "password";
        Assertions.assertThrows(NoSuchAlgorithmException.class, () ->
                SSLContextBuilder.create()
                        .setKeyManagerFactoryAlgorithm(" BAD ")
                        .loadKeyMaterial(resource1, storePassword.toCharArray(), keyPassword.toCharArray())
                        .build());
    }

    // An unknown keystore type must surface as KeyStoreException.
    @Test
    void testBuildNoSuchKeyStoreType() {
        final URL resource1 = getResource("/test-keypasswd.p12");
        final String storePassword = "nopassword";
        final String keyPassword = "password";
        Assertions.assertThrows(KeyStoreException.class, () ->
                SSLContextBuilder.create()
                        .setKeyStoreType(" BAD ")
                        .loadKeyMaterial(resource1, storePassword.toCharArray(), keyPassword.toCharArray())
                        .build());
    }

    // An unknown TrustManagerFactory algorithm must surface as NoSuchAlgorithmException.
    @Test
    void testBuildNoSuchTrustManagerFactoryAlgorithm() {
        final URL resource1 = getResource("/test-keypasswd.p12");
        final String storePassword = "nopassword";
        Assertions.assertThrows(NoSuchAlgorithmException.class, () ->
                SSLContextBuilder.create()
                        .setTrustManagerFactoryAlgorithm(" BAD ")
                        .loadTrustMaterial(resource1, storePassword.toCharArray())
                        .build());
    }

    // setProvider(Provider) must route the SSLContext lookup through the
    // supplied provider instance (DummyProvider records what was requested).
    @Test
    void testBuildWithProvider() throws Exception {
        final URL resource1 = getResource("/test-server.p12");
        final String storePassword = "nopassword";
        final String keyPassword = "nopassword";
        final DummyProvider provider = new DummyProvider();
        SSLContextBuilder.create()
                .setProvider(provider)
                .loadKeyMaterial(resource1, storePassword.toCharArray(), keyPassword.toCharArray())
                .build();
        Assertions.assertTrue(provider.hasBeenRequested("SSLContext"));
    }

    // setProvider(String) must resolve the provider by name from the Security
    // registry; the dummy provider is installed first and removed afterwards.
    @Test
    void testBuildWithProviderName() throws Exception {
        final DummyProvider provider = new DummyProvider();
        Security.insertProviderAt(provider, 1);
        try {
            final URL resource1 = getResource("/test-server.p12");
            final String storePassword = "nopassword";
            final String keyPassword = "nopassword";
            SSLContextBuilder.create()
                    .setProvider(DummyProvider.NAME)
                    .loadKeyMaterial(resource1, storePassword.toCharArray(), keyPassword.toCharArray())
                    .build();
            Assertions.assertTrue(provider.hasBeenRequested("SSLContext"));
        } finally {
            Security.removeProvider(DummyProvider.NAME);
        }
    }

    // A key-store provider name that is not registered must fail the build.
    @Test
    void testBuildKSWithNoSuchProvider() {
        Assertions.assertThrows(NoSuchProviderException.class, () ->
                SSLContextBuilder.create()
                        .setKeyStoreProvider("no-such-provider")
                        .build());
    }

    // setKeyStoreProvider(Provider) must route the KeyManagerFactory lookup
    // through the supplied provider instance.
    @Test
    void testBuildKSWithProvider() throws Exception {
        final URL resource1 = getResource("/test-server.p12");
        final String storePassword = "nopassword";
        final String keyPassword = "nopassword";
        final DummyProvider provider = new DummyProvider();
        SSLContextBuilder.create()
                .setKeyStoreProvider(provider)
                .loadKeyMaterial(resource1, storePassword.toCharArray(), keyPassword.toCharArray())
                .build();
        Assertions.assertTrue(provider.hasBeenRequested("KeyManagerFactory"));
    }

    // setKeyStoreProvider(String) must resolve the provider by name.
    @Test
    void testBuildKSWithProviderName() throws Exception {
        final DummyProvider provider = new DummyProvider();
        Security.insertProviderAt(provider, 1);
        try {
            final URL resource1 = getResource("/test-server.p12");
            final String storePassword = "nopassword";
            final String keyPassword = "nopassword";
            SSLContextBuilder.create()
                    .setKeyStoreProvider(DummyProvider.NAME)
                    .loadKeyMaterial(resource1, storePassword.toCharArray(), keyPassword.toCharArray())
                    .build();
            Assertions.assertTrue(provider.hasBeenRequested("KeyManagerFactory"));
        } finally {
            Security.removeProvider(DummyProvider.NAME);
        }
    }

    // A trust-store provider name that is not registered must fail the build.
    @Test
    void testBuildTSWithNoSuchProvider() {
        Assertions.assertThrows(NoSuchProviderException.class, () ->
                SSLContextBuilder.create()
                        .setTrustStoreProvider("no-such-provider")
                        .build());
    }

    // setTrustStoreProvider(Provider) must route the TrustManagerFactory lookup
    // through the supplied provider instance.
    @Test
    void testBuildTSWithProvider() throws Exception {
        final DummyProvider provider = new DummyProvider();
        SSLContextBuilder.create()
                .setTrustStoreProvider(provider)
                .loadTrustMaterial((KeyStore) null, null)
                .build();
        Assertions.assertTrue(provider.hasBeenRequested("TrustManagerFactory"));
    }

    // setTrustStoreProvider(String) must resolve the provider by name.
    @Test
    void testBuildTSWithProviderName() throws Exception {
        final DummyProvider provider = new DummyProvider();
        Security.insertProviderAt(provider, 1);
        try {
            SSLContextBuilder.create()
                    .setTrustStoreProvider(DummyProvider.NAME)
                    .loadTrustMaterial((KeyStore) null, null)
                    .build();
            Assertions.assertTrue(provider.hasBeenRequested("TrustManagerFactory"));
        } finally {
            Security.removeProvider(DummyProvider.NAME);
        }
    }

    // A wrong key password must surface as UnrecoverableKeyException.
    @Test
    void testKeyWithAlternatePasswordInvalid() {
        final URL resource1 = getResource("/test-keypasswd.p12");
        final String storePassword = "nopassword";
        final String keyPassword = "!password";
        Assertions.assertThrows(UnrecoverableKeyException.class, () ->
                SSLContextBuilder.create()
                        .loadKeyMaterial(resource1, storePassword.toCharArray(), keyPassword.toCharArray())
                        .loadTrustMaterial(resource1, storePassword.toCharArray())
                        .build());
    }

    // Full loopback handshake: client trusts the same keystore the server keys
    // from, so the handshake succeeds and "Hi" is received over TLS.
    @Test
    void testSSLHandshakeServerTrusted() throws Exception {
        final URL resource1 = getResource("/test.p12");
        final String storePassword = "nopassword";
        final String keyPassword = "nopassword";
        final SSLContext serverSslContext = SSLContextBuilder.create()
                .loadKeyMaterial(resource1, storePassword.toCharArray(), keyPassword.toCharArray())
                .build();
        Assertions.assertNotNull(serverSslContext);
        final SSLContext clientSslContext = SSLContextBuilder.create()
                .loadTrustMaterial(resource1, storePassword.toCharArray())
                .build();
        Assertions.assertNotNull(clientSslContext);
        final ServerSocket serverSocket = serverSslContext.getServerSocketFactory().createServerSocket();
        // Port 0 = let the OS pick a free ephemeral port.
        serverSocket.bind(new InetSocketAddress(0));
        this.executorService = Executors.newSingleThreadExecutor();
        // Server side: accept one connection, send two bytes, close.
        final Future<Boolean> future = this.executorService.submit(() -> {
            try (Socket socket = serverSocket.accept()) {
                final OutputStream outputStream = socket.getOutputStream();
                outputStream.write(new byte[]{'H', 'i'});
                outputStream.flush();
            }
            return Boolean.TRUE;
        });
        final int localPort = serverSocket.getLocalPort();
        try (final Socket clientSocket = clientSslContext.getSocketFactory().createSocket()) {
            clientSocket.connect(new InetSocketAddress("localhost", localPort), TIMEOUT.toMillisecondsIntBound());
            clientSocket.setSoTimeout(TIMEOUT.toMillisecondsIntBound());
            final InputStream inputStream = clientSocket.getInputStream();
            Assertions.assertEquals('H', inputStream.read());
            Assertions.assertEquals('i', inputStream.read());
            // -1 = orderly EOF after the server closes its side.
            Assertions.assertEquals(-1, inputStream.read());
        }
        final Boolean result = future.get(5, TimeUnit.SECONDS);
        Assertions.assertNotNull(result);
    }

    // The client trust store does not contain the server's certificate, so the
    // handshake must fail with an IOException on startHandshake().
    @Test
    void testSSLHandshakeServerNotTrusted() throws Exception {
        final URL resource1 = getResource("/test-server.p12");
        final String storePassword = "nopassword";
        final String keyPassword = "nopassword";
        final SSLContext serverSslContext = SSLContextBuilder.create()
                .loadKeyMaterial(resource1, storePassword.toCharArray(), keyPassword.toCharArray())
                .build();
        Assertions.assertNotNull(serverSslContext);
        final URL resource2 = getResource("/test.p12");
        final SSLContext clientSslContext = SSLContextBuilder.create()
                .loadTrustMaterial(resource2, storePassword.toCharArray())
                .build();
        Assertions.assertNotNull(clientSslContext);
        final ServerSocket serverSocket = serverSslContext.getServerSocketFactory().createServerSocket();
        serverSocket.bind(new InetSocketAddress(0));
        this.executorService = Executors.newSingleThreadExecutor();
        // Server side: getSession() forces the handshake; result is not checked
        // because the failure is asserted on the client side.
        this.executorService.submit(() -> {
            try (SSLSocket socket = (SSLSocket) serverSocket.accept()) {
                socket.getSession();
            }
            return Boolean.FALSE;
        });
        final int localPort = serverSocket.getLocalPort();
        try (final SSLSocket clientSocket = (SSLSocket) clientSslContext.getSocketFactory().createSocket()) {
            clientSocket.connect(new InetSocketAddress("localhost", localPort), TIMEOUT.toMillisecondsIntBound());
            clientSocket.setSoTimeout(TIMEOUT.toMillisecondsIntBound());
            Assertions.assertThrows(IOException.class, clientSocket::startHandshake);
        }
    }

    // A custom TrustStrategy that accepts everything must both allow the
    // handshake and receive the server's certificate chain for inspection.
    @Test
    void testSSLHandshakeServerCustomTrustStrategy() throws Exception {
        final URL resource1 = getResource("/test-server.p12");
        final String storePassword = "nopassword";
        final String keyPassword = "nopassword";
        final SSLContext serverSslContext = SSLContextBuilder.create()
                .loadKeyMaterial(resource1, storePassword.toCharArray(), keyPassword.toCharArray())
                .build();
        Assertions.assertNotNull(serverSslContext);
        // Captures the chain handed to the strategy so it can be asserted later.
        final AtomicReference<X509Certificate[]> certChainRef = new AtomicReference<>();
        final TrustStrategy trustStrategy = (chain, authType) -> {
            certChainRef.set(chain);
            return true;
        };
        final SSLContext clientSslContext = SSLContextBuilder.create()
                .loadTrustMaterial(trustStrategy)
                .build();
        Assertions.assertNotNull(clientSslContext);
        final ServerSocket serverSocket = serverSslContext.getServerSocketFactory().createServerSocket();
        serverSocket.bind(new InetSocketAddress(0));
        this.executorService = Executors.newSingleThreadExecutor();
        final Future<Boolean> future = this.executorService.submit(() -> {
            try (Socket socket = serverSocket.accept()) {
                final OutputStream outputStream = socket.getOutputStream();
                outputStream.write(new byte[]{'H', 'i'});
                outputStream.flush();
            }
            return Boolean.TRUE;
        });
        final int localPort = serverSocket.getLocalPort();
        try (final SSLSocket clientSocket = (SSLSocket) clientSslContext.getSocketFactory().createSocket()) {
            clientSocket.connect(new InetSocketAddress("localhost", localPort), TIMEOUT.toMillisecondsIntBound());
            clientSocket.setSoTimeout(TIMEOUT.toMillisecondsIntBound());
            final InputStream inputStream = clientSocket.getInputStream();
            Assertions.assertEquals('H', inputStream.read());
            Assertions.assertEquals('i', inputStream.read());
            Assertions.assertEquals(-1, inputStream.read());
        }
        final Boolean result = future.get(5, TimeUnit.SECONDS);
        Assertions.assertNotNull(result);
        // Chain fixture is expected to be: [0] server cert, [1] issuing test CA.
        final X509Certificate[] certs = certChainRef.get();
        Assertions.assertNotNull(certs);
        Assertions.assertEquals(2, certs.length);
        final X509Certificate cert1 = certs[0];
        final Principal subjectDN1 = cert1.getSubjectDN();
        Assertions.assertNotNull(subjectDN1);
        Assertions.assertEquals("CN=Test Server, OU=HttpComponents Project, O=Apache Software Foundation", subjectDN1.getName());
        final X509Certificate cert2 = certs[1];
        final Principal subjectDN2 = cert2.getSubjectDN();
        Assertions.assertNotNull(subjectDN2);
        Assertions.assertEquals("EMAILADDRESS=dev@hc.apache.org, " +
                "CN=Test CA, OU=HttpComponents Project, O=Apache Software Foundation", subjectDN2.getName());
        // The CA certificate is self-issued: issuer DN equals its subject DN.
        final Principal issuerDN = cert2.getIssuerDN();
        Assertions.assertNotNull(issuerDN);
        Assertions.assertEquals("EMAILADDRESS=dev@hc.apache.org, " +
                "CN=Test CA, OU=HttpComponents Project, O=Apache Software Foundation", issuerDN.getName());
    }

    // Server merely *wants* client auth (setWantClientAuth); a client without
    // key material still handshakes, and the server sees no peer principal.
    @Test
    void testSSLHandshakeClientUnauthenticated() throws Exception {
        final URL resource1 = getResource("/test-server.p12");
        final String storePassword = "nopassword";
        final String keyPassword = "nopassword";
        final SSLContext serverSslContext = SSLContextBuilder.create()
                .loadKeyMaterial(resource1, storePassword.toCharArray(), keyPassword.toCharArray())
                .build();
        Assertions.assertNotNull(serverSslContext);
        final URL resource2 = getResource("/test-client.p12");
        final SSLContext clientSslContext = SSLContextBuilder.create()
                .loadTrustMaterial(resource2, storePassword.toCharArray())
                .build();
        Assertions.assertNotNull(clientSslContext);
        final SSLServerSocket serverSocket = (SSLServerSocket) serverSslContext.getServerSocketFactory().createServerSocket();
        serverSocket.setWantClientAuth(true);
        serverSocket.bind(new InetSocketAddress(0));
        this.executorService = Executors.newSingleThreadExecutor();
        final Future<Principal> future = this.executorService.submit(() -> {
            Principal clientPrincipal = null;
            try (SSLSocket socket = (SSLSocket) serverSocket.accept()) {
                final SSLSession session = socket.getSession();
                try {
                    clientPrincipal = session.getPeerPrincipal();
                } catch (final SSLPeerUnverifiedException ignore) {
                    // Expected: client presented no certificate; leave principal null.
                }
                final OutputStream outputStream = socket.getOutputStream();
                outputStream.write(new byte [] {'H', 'i'});
                outputStream.flush();
            }
            return clientPrincipal;
        });
        final int localPort = serverSocket.getLocalPort();
        try (final SSLSocket clientSocket = (SSLSocket) clientSslContext.getSocketFactory().createSocket()) {
            clientSocket.connect(new InetSocketAddress("localhost", localPort), TIMEOUT.toMillisecondsIntBound());
            clientSocket.setSoTimeout(TIMEOUT.toMillisecondsIntBound());
            clientSocket.startHandshake();
            final InputStream inputStream = clientSocket.getInputStream();
            Assertions.assertEquals('H', inputStream.read());
            Assertions.assertEquals('i', inputStream.read());
            Assertions.assertEquals(-1, inputStream.read());
        }
        final Principal clientPrincipal = future.get(5, TimeUnit.SECONDS);
        Assertions.assertNull(clientPrincipal);
    }

    // Server *requires* client auth (setNeedClientAuth); a client without key
    // material must fail — the IOException may surface on handshake or first read.
    @Test
    void testSSLHandshakeClientUnauthenticatedError() throws Exception {
        final URL resource1 = getResource("/test-server.p12");
        final String storePassword = "nopassword";
        final String keyPassword = "nopassword";
        final SSLContext serverSslContext = SSLContextBuilder.create()
                .loadKeyMaterial(resource1, storePassword.toCharArray(), keyPassword.toCharArray())
                .build();
        Assertions.assertNotNull(serverSslContext);
        final URL resource2 = getResource("/test-client.p12");
        final SSLContext clientSslContext = SSLContextBuilder.create()
                .loadTrustMaterial(resource2, storePassword.toCharArray())
                .build();
        Assertions.assertNotNull(clientSslContext);
        final SSLServerSocket serverSocket = (SSLServerSocket) serverSslContext.getServerSocketFactory().createServerSocket();
        serverSocket.setNeedClientAuth(true);
        serverSocket.bind(new InetSocketAddress(0));
        this.executorService = Executors.newSingleThreadExecutor();
        this.executorService.submit(() -> {
            try (SSLSocket socket = (SSLSocket) serverSocket.accept()) {
                socket.getSession();
            }
            return Boolean.FALSE;
        });
        final int localPort = serverSocket.getLocalPort();
        try (final SSLSocket clientSocket = (SSLSocket) clientSslContext.getSocketFactory().createSocket()) {
            clientSocket.connect(new InetSocketAddress("localhost", localPort), TIMEOUT.toMillisecondsIntBound());
            clientSocket.setSoTimeout(TIMEOUT.toMillisecondsIntBound());
            Assertions.assertThrows(IOException.class, () -> {
                clientSocket.startHandshake();
                final InputStream inputStream = clientSocket.getInputStream();
                inputStream.read();
            });
        }
    }

    // Mutual TLS: server requires client auth, client supplies key material,
    // and the server must see a non-null peer principal.
    @Test
    void testSSLHandshakeClientAuthenticated() throws Exception {
        final URL resource1 = getResource("/test-server.p12");
        final String storePassword = "nopassword";
        final String keyPassword = "nopassword";
        final SSLContext serverSslContext = SSLContextBuilder.create()
                .loadTrustMaterial(resource1, storePassword.toCharArray())
                .loadKeyMaterial(resource1, storePassword.toCharArray(), keyPassword.toCharArray())
                .build();
        Assertions.assertNotNull(serverSslContext);
        final URL resource2 = getResource("/test-client.p12");
        final SSLContext clientSslContext = SSLContextBuilder.create()
                .loadTrustMaterial(resource2, storePassword.toCharArray())
                .loadKeyMaterial(resource2, storePassword.toCharArray(), storePassword.toCharArray())
                .build();
        Assertions.assertNotNull(clientSslContext);
        final SSLServerSocket serverSocket = (SSLServerSocket) serverSslContext.getServerSocketFactory().createServerSocket();
        serverSocket.setNeedClientAuth(true);
        serverSocket.bind(new InetSocketAddress(0));
        this.executorService = Executors.newSingleThreadExecutor();
        final Future<Principal> future = this.executorService.submit(() -> {
            try (SSLSocket socket = (SSLSocket) serverSocket.accept()) {
                final SSLSession session = socket.getSession();
                final Principal clientPrincipal = session.getPeerPrincipal();
                final OutputStream outputStream = socket.getOutputStream();
                outputStream.write(new byte[]{'H', 'i'});
                outputStream.flush();
                return clientPrincipal;
            }
        });
        final int localPort = serverSocket.getLocalPort();
        try (final SSLSocket clientSocket = (SSLSocket) clientSslContext.getSocketFactory().createSocket()) {
            clientSocket.connect(new InetSocketAddress("localhost", localPort), TIMEOUT.toMillisecondsIntBound());
            clientSocket.setSoTimeout(TIMEOUT.toMillisecondsIntBound());
            clientSocket.startHandshake();
            final InputStream inputStream = clientSocket.getInputStream();
            Assertions.assertEquals('H', inputStream.read());
            Assertions.assertEquals('i', inputStream.read());
            Assertions.assertEquals(-1, inputStream.read());
        }
        final Principal clientPrincipal = future.get(5, TimeUnit.SECONDS);
        Assertions.assertNotNull(clientPrincipal);
    }

    // Mutual TLS with a PrivateKeyStrategy that forces the "client2" alias;
    // the server must then see Test Client 2's DN as the peer principal.
    @Test
    void testSSLHandshakeClientAuthenticatedPrivateKeyStrategy() throws Exception {
        final URL resource1 = getResource("/test-server.p12");
        final String storePassword = "nopassword";
        final String keyPassword = "nopassword";
        final SSLContext serverSslContext = SSLContextBuilder.create()
                .loadTrustMaterial(resource1, storePassword.toCharArray())
                .loadKeyMaterial(resource1, storePassword.toCharArray(), keyPassword.toCharArray())
                .build();
        Assertions.assertNotNull(serverSslContext);
        // Pick the "client2" alias when available, otherwise decline to authenticate.
        final PrivateKeyStrategy privateKeyStrategy = (aliases, sslParameters) ->
                aliases.containsKey("client2") ? "client2" : null;
        final URL resource2 = getResource("/test-client.p12");
        final SSLContext clientSslContext = SSLContextBuilder.create()
                .loadTrustMaterial(resource2, storePassword.toCharArray())
                .loadKeyMaterial(resource2, storePassword.toCharArray(), storePassword.toCharArray(), privateKeyStrategy)
                .build();
        Assertions.assertNotNull(clientSslContext);
        final SSLServerSocket serverSocket = (SSLServerSocket) serverSslContext.getServerSocketFactory().createServerSocket();
        serverSocket.setNeedClientAuth(true);
        serverSocket.bind(new InetSocketAddress(0));
        this.executorService = Executors.newSingleThreadExecutor();
        final Future<Principal> future = this.executorService.submit(() -> {
            try (SSLSocket socket = (SSLSocket) serverSocket.accept()) {
                final SSLSession session = socket.getSession();
                final Principal clientPrincipal = session.getPeerPrincipal();
                final OutputStream outputStream = socket.getOutputStream();
                outputStream.write(new byte[]{'H', 'i'});
                outputStream.flush();
                return clientPrincipal;
            }
        });
        final int localPort = serverSocket.getLocalPort();
        try (final SSLSocket clientSocket = (SSLSocket) clientSslContext.getSocketFactory().createSocket()) {
            clientSocket.connect(new InetSocketAddress("localhost", localPort), TIMEOUT.toMillisecondsIntBound());
            clientSocket.setSoTimeout(TIMEOUT.toMillisecondsIntBound());
            clientSocket.startHandshake();
            final InputStream inputStream = clientSocket.getInputStream();
            Assertions.assertEquals('H', inputStream.read());
            Assertions.assertEquals('i', inputStream.read());
            Assertions.assertEquals(-1, inputStream.read());
        }
        final Principal clientPrincipal = future.get(5, TimeUnit.SECONDS);
        Assertions.assertNotNull(clientPrincipal);
        Assertions.assertEquals("CN=Test Client 2,OU=HttpComponents Project,O=Apache Software Foundation", clientPrincipal.getName());
    }

    // Server pinned to TLSv1 vs client pinned to SSLv3: no common protocol,
    // so the handshake must fail (IOException on Windows, SSLException elsewhere).
    @Test
    void testSSLHandshakeProtocolMismatch1() throws Exception {
        final URL resource1 = getResource("/test-server.p12");
        final String storePassword = "nopassword";
        final String keyPassword = "nopassword";
        final SSLContext serverSslContext = SSLContextBuilder.create()
                .loadKeyMaterial(resource1, storePassword.toCharArray(), keyPassword.toCharArray())
                .build();
        Assertions.assertNotNull(serverSslContext);
        final URL resource2 = getResource("/test-client.p12");
        final SSLContext clientSslContext = SSLContextBuilder.create()
                .loadTrustMaterial(resource2, storePassword.toCharArray())
                .build();
        Assertions.assertNotNull(clientSslContext);
        final SSLServerSocket serverSocket = (SSLServerSocket) serverSslContext.getServerSocketFactory().createServerSocket();
        // Guard: skip-proof precondition that the JVM still supports the pinned protocol.
        final Set<String> supportedServerProtocols = new LinkedHashSet<>(Arrays.asList(serverSocket.getSupportedProtocols()));
        Assertions.assertTrue(supportedServerProtocols.contains("TLSv1"));
        serverSocket.setEnabledProtocols(new String[] {"TLSv1"});
        serverSocket.bind(new InetSocketAddress(0));
        this.executorService = Executors.newSingleThreadExecutor();
        this.executorService.submit(() -> {
            try (SSLSocket socket = (SSLSocket) serverSocket.accept()) {
                socket.getSession();
            }
            return Boolean.FALSE;
        });
        final int localPort = serverSocket.getLocalPort();
        try (final SSLSocket clientSocket = (SSLSocket) clientSslContext.getSocketFactory().createSocket()) {
            final Set<String> supportedClientProtocols = new LinkedHashSet<>(Arrays.asList(clientSocket.getSupportedProtocols()));
            Assertions.assertTrue(supportedClientProtocols.contains("SSLv3"));
            clientSocket.setEnabledProtocols(new String[] {"SSLv3"} );
            clientSocket.connect(new InetSocketAddress("localhost", localPort), TIMEOUT.toMillisecondsIntBound());
            clientSocket.setSoTimeout(TIMEOUT.toMillisecondsIntBound());
            if (isWindows()) {
                Assertions.assertThrows(IOException.class, clientSocket::startHandshake);
            } else {
                Assertions.assertThrows(SSLException.class, clientSocket::startHandshake);
            }
        }
    }

    // Mirror of mismatch1 with the roles reversed: server SSLv3, client TLSv1.
    @Test
    void testSSLHandshakeProtocolMismatch2() throws Exception {
        final URL resource1 = getResource("/test-server.p12");
        final String storePassword = "nopassword";
        final String keyPassword = "nopassword";
        final SSLContext serverSslContext = SSLContextBuilder.create()
                .loadKeyMaterial(resource1, storePassword.toCharArray(), keyPassword.toCharArray())
                .build();
        Assertions.assertNotNull(serverSslContext);
        final URL resource2 = getResource("/test-client.p12");
        final SSLContext clientSslContext = SSLContextBuilder.create()
                .loadTrustMaterial(resource2, storePassword.toCharArray())
                .build();
        Assertions.assertNotNull(clientSslContext);
        final SSLServerSocket serverSocket = (SSLServerSocket) serverSslContext.getServerSocketFactory().createServerSocket();
        final Set<String> supportedServerProtocols = new LinkedHashSet<>(Arrays.asList(serverSocket.getSupportedProtocols()));
        Assertions.assertTrue(supportedServerProtocols.contains("SSLv3"));
        serverSocket.setEnabledProtocols(new String[] {"SSLv3"});
        serverSocket.bind(new InetSocketAddress(0));
        this.executorService = Executors.newSingleThreadExecutor();
        this.executorService.submit(() -> {
            try (SSLSocket socket = (SSLSocket) serverSocket.accept()) {
                socket.getSession();
            }
            return Boolean.FALSE;
        });
        final int localPort = serverSocket.getLocalPort();
        try (final SSLSocket clientSocket = (SSLSocket) clientSslContext.getSocketFactory().createSocket()) {
            final Set<String> supportedClientProtocols = new LinkedHashSet<>(
                    Arrays.asList(clientSocket.getSupportedProtocols()));
            Assertions.assertTrue(supportedClientProtocols.contains("TLSv1"));
            clientSocket.setEnabledProtocols(new String[]{"TLSv1"});
            clientSocket.connect(new InetSocketAddress("localhost", localPort), TIMEOUT.toMillisecondsIntBound());
            clientSocket.setSoTimeout(TIMEOUT.toMillisecondsIntBound());
            if (isWindows()) {
                Assertions.assertThrows(IOException.class, clientSocket::startHandshake);
            } else {
                Assertions.assertThrows(SSLException.class, clientSocket::startHandshake);
            }
        }
    }

    // JSSE endpoint identification: "HTTPS" hostname verification must pass for
    // the matching host ("localhost"), be skipped when the algorithm is null,
    // and fail with SSLException for a non-matching host ("otherhost").
    @Test
    void testJSSEEndpointIdentification() throws Exception {
        final URL resource1 = getResource("/test-server.p12");
        final String storePassword = "nopassword";
        final String keyPassword = "nopassword";
        final SSLContext serverSslContext = SSLContextBuilder.create()
                .loadKeyMaterial(resource1, storePassword.toCharArray(), keyPassword.toCharArray())
                .build();
        Assertions.assertNotNull(serverSslContext);
        final URL resource2 = getResource("/test-client.p12");
        final SSLContext clientSslContext = SSLContextBuilder.create()
                .loadTrustMaterial(resource2, storePassword.toCharArray())
                .build();
        Assertions.assertNotNull(clientSslContext);
        final SSLServerSocket serverSocket = (SSLServerSocket) serverSslContext.getServerSocketFactory().createServerSocket();
        serverSocket.bind(new InetSocketAddress(0));
        this.executorService = Executors.newSingleThreadExecutor();
        // Accept-loop server: handshakes each connection until the server socket
        // is closed by cleanup(), which ends the loop via IOException.
        this.executorService.submit(() -> {
            for (;;) {
                try (SSLSocket socket = (SSLSocket) serverSocket.accept()) {
                    socket.getSession();
                    socket.shutdownOutput();
                } catch (final IOException ex) {
                    return Boolean.FALSE;
                }
            }
        });
        // Case 1: hostname matches the certificate; HTTPS verification succeeds.
        final int localPort1 = serverSocket.getLocalPort();
        try (final Socket clientSocket = new Socket()) {
            clientSocket.connect(new InetSocketAddress("localhost", localPort1));
            try (SSLSocket sslSocket = (SSLSocket) clientSslContext.getSocketFactory().createSocket(clientSocket, "localhost", -1, true)) {
                final SSLParameters sslParameters = sslSocket.getSSLParameters();
                sslParameters.setEndpointIdentificationAlgorithm("HTTPS");
                sslSocket.setSSLParameters(sslParameters);
                sslSocket.startHandshake();
            }
        }
        // Case 2: wrong hostname but identification disabled (null algorithm); succeeds.
        final int localPort2 = serverSocket.getLocalPort();
        try (final Socket clientSocket = new Socket()) {
            clientSocket.connect(new InetSocketAddress("localhost", localPort2));
            try (SSLSocket sslSocket = (SSLSocket) clientSslContext.getSocketFactory().createSocket(clientSocket, "otherhost", -1, true)) {
                final SSLParameters sslParameters = sslSocket.getSSLParameters();
                sslParameters.setEndpointIdentificationAlgorithm(null);
                sslSocket.setSSLParameters(sslParameters);
                sslSocket.startHandshake();
            }
        }
        // Case 3: wrong hostname with HTTPS verification enabled; must fail.
        final int localPort3 = serverSocket.getLocalPort();
        Assertions.assertThrows(SSLException.class, () -> {
            try (final Socket clientSocket = new Socket()) {
                clientSocket.connect(new InetSocketAddress("localhost", localPort3));
                try (SSLSocket sslSocket = (SSLSocket) clientSslContext.getSocketFactory().createSocket(clientSocket, "otherhost", -1, true)) {
                    final SSLParameters sslParameters = sslSocket.getSSLParameters();
                    sslParameters.setEndpointIdentificationAlgorithm("HTTPS");
                    sslSocket.setSSLParameters(sslParameters);
                    sslSocket.startHandshake();
                }
            }
        });
    }
}
apache/jackrabbit-oak
36,898
oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/Commit.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.plugins.document; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.NavigableSet; import java.util.Objects; import java.util.Set; import java.util.TreeSet; import java.util.concurrent.TimeUnit; import org.apache.jackrabbit.oak.commons.collections.IterableUtils; import org.apache.jackrabbit.oak.commons.collections.ListUtils; import org.apache.jackrabbit.oak.commons.json.JsopStream; import org.apache.jackrabbit.oak.commons.json.JsopWriter; import org.apache.jackrabbit.oak.plugins.document.UpdateOp.Key; import org.apache.jackrabbit.oak.plugins.document.UpdateOp.Operation; import org.apache.jackrabbit.oak.plugins.document.util.Utils; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static java.util.Objects.requireNonNull; import static java.util.Collections.singletonList; import static org.apache.jackrabbit.oak.plugins.document.Collection.JOURNAL; import static 
org.apache.jackrabbit.oak.plugins.document.Collection.NODES; import static org.apache.jackrabbit.oak.plugins.document.Document.MOD_COUNT; import static org.apache.jackrabbit.oak.plugins.document.NodeDocument.COLLISIONS; import static org.apache.jackrabbit.oak.plugins.document.NodeDocument.SPLIT_CANDIDATE_THRESHOLD; /** * A higher level object representing a commit. */ public class Commit { private static final Logger LOG = LoggerFactory.getLogger(Commit.class); private static final String PROPERTY_NAME_CHILDORDER = ":childOrder"; protected final DocumentNodeStore nodeStore; private final RevisionVector baseRevision; private final RevisionVector startRevisions; private final Revision revision; private final HashMap<Path, UpdateOp> operations = new LinkedHashMap<>(); private final Set<Revision> collisions = new LinkedHashSet<Revision>(); private Branch b; private Rollback rollback = Rollback.NONE; /** * List of all node paths which have been modified in this commit. In addition to the nodes * which are actually changed it also contains there parent node paths */ private final HashSet<Path> modifiedNodes = new HashSet<>(); private final HashSet<Path> addedNodes = new HashSet<>(); private final HashSet<Path> removedNodes = new HashSet<>(); /** Set of all nodes which have binary properties. **/ private final HashSet<Path> nodesWithBinaries = new HashSet<>(); private final HashMap<Path, Path> bundledNodes = new HashMap<>(); /** * Create a new Commit. * * @param nodeStore the node store. * @param revision the revision for this commit. * @param baseRevision the base revision for this commit or {@code null} if * there is none. * @param startRevisions the revisions for each cluster node corresponding * to the start time of the cluster nodes. 
*/ Commit(@NotNull DocumentNodeStore nodeStore, @NotNull Revision revision, @Nullable RevisionVector baseRevision, @NotNull RevisionVector startRevisions) { this.nodeStore = requireNonNull(nodeStore); this.revision = requireNonNull(revision); this.baseRevision = baseRevision; this.startRevisions = startRevisions; } Commit(@NotNull DocumentNodeStore nodeStore, @NotNull Revision revision, @Nullable RevisionVector baseRevision, @NotNull RevisionVector startRevisions, @NotNull Map<Path, UpdateOp> operations, @NotNull Set<Path> addedNodes, @NotNull Set<Path> removedNodes, @NotNull Set<Path> nodesWithBinaries, @NotNull Map<Path, Path> bundledNodes) { this(nodeStore, revision, baseRevision, startRevisions); this.operations.putAll(operations); this.addedNodes.addAll(addedNodes); this.removedNodes.addAll(removedNodes); this.nodesWithBinaries.addAll(nodesWithBinaries); this.bundledNodes.putAll(bundledNodes); } UpdateOp getUpdateOperationForNode(Path path) { UpdateOp op = operations.get(path); if (op == null) { op = createUpdateOp(path, revision, isBranchCommit()); operations.put(path, op); } return op; } static UpdateOp createUpdateOp(Path path, Revision revision, boolean isBranch) { String id = Utils.getIdFromPath(path); UpdateOp op = new UpdateOp(id, false); NodeDocument.setModified(op, revision); if (isBranch) { NodeDocument.setBranchCommit(op, revision); } return op; } /** * The revision for this new commit. That is, the changes within this commit * will be visible with this revision. * * @return the revision for this new commit. */ @NotNull Revision getRevision() { return revision; } /** * Returns the base revision for this commit. That is, the revision passed * to {@link DocumentNodeStore#newCommit}. The base revision may be * <code>null</code>, e.g. for the initial commit of the root node, when * there is no base revision. * * @return the base revision of this commit or <code>null</code>. 
*/ @Nullable RevisionVector getBaseRevision() { return baseRevision; } /** * @return all modified paths, including ancestors without explicit * modifications. */ @NotNull Iterable<Path> getModifiedPaths() { return modifiedNodes; } boolean isEmpty() { return operations.isEmpty(); } /** * Performs a rollback of this commit if necessary. * * @return {@code false} if a rollback was necessary and the rollback did * not complete successfully, {@code true} otherwise. */ boolean rollback() { boolean success = false; try { rollback.perform(this.nodeStore.getDocumentStore()); success = true; } catch (Throwable t) { // catch any exception caused by the rollback and log it LOG.warn("Rollback failed", t); } return success; } /** * Applies this commit to the store. * * @throws ConflictException if the commit failed because of a conflict. * @throws DocumentStoreException if the commit cannot be applied. */ void apply() throws ConflictException, DocumentStoreException { boolean success = false; RevisionVector baseRev = getBaseRevision(); boolean isBranch = baseRev != null && baseRev.isBranch(); Revision rev = getRevision(); if (isBranch && !nodeStore.isDisableBranches()) { try { // prepare commit prepare(baseRev); success = true; } finally { if (!success) { rollback(); Branch branch = getBranch(); if (branch != null) { branch.removeCommit(rev.asBranchRevision()); if (!branch.hasCommits()) { nodeStore.getBranches().remove(branch); } } } } } else { applyInternal(); } } /** * Apply the changes to the document store and the cache. 
*/ private void applyInternal() throws ConflictException, DocumentStoreException { if (!operations.isEmpty()) { updateParentChildStatus(); updateBinaryStatus(); applyToDocumentStore(); } } private void prepare(RevisionVector baseRevision) throws ConflictException, DocumentStoreException { if (!operations.isEmpty()) { updateParentChildStatus(); updateBinaryStatus(); applyToDocumentStoreWithTiming(baseRevision); } } /** * Update the binary status in the update op. */ private void updateBinaryStatus() { for (Path path : this.nodesWithBinaries) { NodeDocument.setHasBinary(getUpdateOperationForNode(path)); } } /** * Apply the changes to the document store. */ void applyToDocumentStore() throws ConflictException, DocumentStoreException { applyToDocumentStoreWithTiming(null); } /** * Apply the changes to the document store. * * @param baseBranchRevision the base revision of this commit. Currently only * used for branch commits. * @throws ConflictException if a conflict is detected with another commit. * @throws DocumentStoreException if an error occurs while writing to the * underlying store. */ private void applyToDocumentStoreWithTiming(RevisionVector baseBranchRevision) throws ConflictException, DocumentStoreException { long start = System.nanoTime(); try { applyToDocumentStore(baseBranchRevision); } finally { nodeStore.getStatsCollector().doneChangesApplied( TimeUnit.NANOSECONDS.toMicros(System.nanoTime() - start)); } } /** * Apply the changes to the document store. * * @param baseBranchRevision the base revision of this commit. Currently only * used for branch commits. * @throws ConflictException if a conflict is detected with another commit. * @throws DocumentStoreException if an error occurs while writing to the * underlying store. */ private void applyToDocumentStore(RevisionVector baseBranchRevision) throws ConflictException, DocumentStoreException { // initially set the rollback to always fail until we have changes // in an oplog list and a commit root. 
rollback = Rollback.FAILED; // the value in _revisions.<revision> property of the commit root node // regular commits use "c", which makes the commit visible to // other readers. branch commits use the base revision to indicate // the visibility of the commit String commitValue = baseBranchRevision != null ? baseBranchRevision.getBranchRevision().toString() : "c"; DocumentStore store = nodeStore.getDocumentStore(); Path commitRootPath = null; if (baseBranchRevision != null) { // branch commits always use root node as commit root commitRootPath = Path.ROOT; } ArrayList<UpdateOp> changedNodes = new ArrayList<UpdateOp>(); // operations are added to this list before they are executed, // so that all operations can be rolled back if there is a conflict ArrayList<UpdateOp> opLog = new ArrayList<UpdateOp>(); // Compute the commit root for (Path p : operations.keySet()) { markChanged(p); if (commitRootPath == null) { commitRootPath = p; } else { while (!commitRootPath.isAncestorOf(p)) { Path parent = commitRootPath.getParent(); if (parent == null) { break; } commitRootPath = parent; } } } // adjust commit root when it falls on a bundled node commitRootPath = bundledNodes.getOrDefault(commitRootPath, commitRootPath); rollback = new Rollback(revision, opLog, Utils.getIdFromPath(commitRootPath), nodeStore.getCreateOrUpdateBatchSize()); for (Path p : bundledNodes.keySet()){ markChanged(p); } // push branch changes to journal if (baseBranchRevision != null) { // store as external change JournalEntry doc = JOURNAL.newDocument(store); doc.modified(modifiedNodes); Revision r = revision.asBranchRevision(); boolean success = store.create(JOURNAL, singletonList(doc.asUpdateOp(r))); if (!success) { LOG.error("Failed to update journal for revision {}", r); LOG.debug("Failed to update journal for revision {} with doc {}", r, doc.format()); } } int commitRootDepth = commitRootPath.getDepth(); // check if there are real changes on the commit root boolean commitRootHasChanges = 
operations.containsKey(commitRootPath); for (UpdateOp op : operations.values()) { NodeDocument.setCommitRoot(op, revision, commitRootDepth); // special case for :childOrder updates if (nodeStore.isChildOrderCleanupEnabled()) { final Branch localBranch = getBranch(); if (localBranch != null) { final NavigableSet<Revision> commits = new TreeSet<>(localBranch.getCommits()); boolean removePreviousSetOperations = false; for (Map.Entry<Key, Operation> change : op.getChanges().entrySet()) { if (PROPERTY_NAME_CHILDORDER.equals(change.getKey().getName()) && Operation.Type.SET_MAP_ENTRY == change.getValue().type) { // we are setting child order, so we should remove previous set operations from the same branch removePreviousSetOperations = true; // branch.getCommits contains all revisions of the branch // including the new one we're about to make // so don't do a removeMapEntry for that commits.remove(change.getKey().getRevision().asBranchRevision()); } } if (removePreviousSetOperations) { if (!commits.isEmpty()) { int countRemoves = 0; for (Revision rev : commits.descendingSet()) { op.removeMapEntry(PROPERTY_NAME_CHILDORDER, rev.asTrunkRevision()); if (++countRemoves >= 256) { LOG.debug("applyToDocumentStore : only cleaning up last {} branch commits.", countRemoves); break; } } LOG.debug("applyToDocumentStore : childOrder-edited op is: {}", op); } } } } changedNodes.add(op); } // create a "root of the commit" if there is none UpdateOp commitRoot = getUpdateOperationForNode(commitRootPath); boolean success = false; try { opLog.addAll(changedNodes); if (conditionalCommit(changedNodes, commitValue)) { success = true; } else { int batchSize = nodeStore.getCreateOrUpdateBatchSize(); for (List<UpdateOp> updates : ListUtils.partitionList(changedNodes, batchSize)) { List<NodeDocument> oldDocs = store.createOrUpdate(NODES, updates); checkConflicts(oldDocs, updates); checkSplitCandidate(oldDocs); } // finally write the commit root (the commit root might be written // twice, first to 
check if there was a conflict, and only then to // commit the revision, with the revision property set) NodeDocument.setRevision(commitRoot, revision, commitValue); if (commitRootHasChanges) { // remove previously added commit root NodeDocument.removeCommitRoot(commitRoot, revision); } opLog.add(commitRoot); if (baseBranchRevision == null) { // create a clone of the commitRoot in order // to set isNew to false. If we get here the // commitRoot document already exists and // only needs an update UpdateOp commit = commitRoot.copy(); commit.setNew(false); // only set revision on commit root when there is // no collision for this commit revision commit.containsMapEntry(COLLISIONS, revision, false); NodeDocument before = nodeStore.updateCommitRoot(commit, revision); if (before == null) { String msg = "Conflicting concurrent change. " + "Update operation failed: " + commit; NodeDocument commitRootDoc = store.find(NODES, commit.getId()); if (commitRootDoc == null) { throw new DocumentStoreException(msg); } else { throw new ConflictException(msg, commitRootDoc.getConflictsFor( Collections.singleton(revision))); } } else { success = true; // if we get here the commit was successful and // the commit revision is set on the commitRoot // document for this commit. // now check for conflicts/collisions by other commits. // use original commitRoot operation with // correct isNew flag. checkConflicts(commitRoot, before); checkSplitCandidate(before); } } else { // this is a branch commit, do not fail on collisions now // trying to merge the branch will fail later createOrUpdateNode(store, commitRoot); } } } catch (Exception e) { // OAK-3084 do not roll back if already committed if (success) { LOG.error("Exception occurred after commit. 
Rollback will be suppressed.", e); } else { if (e instanceof ConflictException) { throw e; } else { throw DocumentStoreException.convert(e); } } } finally { if (success) { rollback = Rollback.NONE; } } } private boolean conditionalCommit(List<UpdateOp> changedNodes, String commitValue) throws DocumentStoreException { // conditional commit is only possible when not on a branch // and commit root is on the same document as the changes if (!Utils.isCommitted(commitValue) || changedNodes.size() != 1) { return false; } UpdateOp op = changedNodes.get(0); DocumentStore store = nodeStore.getDocumentStore(); NodeDocument doc = store.getIfCached(NODES, op.getId()); if (doc == null || doc.getModCount() == null) { // document not in cache or store does not maintain modCount return false; } try { checkConflicts(op, doc); } catch (ConflictException e) { // remove collision marker again removeCollisionMarker(op.getId()); return false; } // if we get here, update based on current doc does not conflict // create a new commit update operation, setting the revisions // commit entry together with the other changes UpdateOp commit = op.copy(); NodeDocument.unsetCommitRoot(commit, revision); NodeDocument.setRevision(commit, revision, commitValue); // make the update conditional on the modCount commit.equals(MOD_COUNT, doc.getModCount()); NodeDocument before = nodeStore.updateCommitRoot(commit, revision); if (before != null) { checkSplitCandidate(before); } return before != null; } private void removeCollisionMarker(String id) { UpdateOp removeCollision = new UpdateOp(id, false); NodeDocument.removeCollision(removeCollision, revision); nodeStore.getDocumentStore().findAndUpdate(NODES, removeCollision); } private void updateParentChildStatus() { final Set<Path> processedParents = new HashSet<>(); for (Path path : addedNodes) { Path parentPath = path.getParent(); if (parentPath == null) { continue; } if (processedParents.contains(parentPath)) { continue; } //Ignore setting children path 
for bundled nodes if (isBundled(parentPath)){ continue; } processedParents.add(parentPath); UpdateOp op = getUpdateOperationForNode(parentPath); NodeDocument.setChildrenFlag(op, true); } } /** * Try to create or update the node. If there was a conflict, this method * throws a {@link ConflictException}, even though the change is still applied. * * @param store the store * @param op the operation * @throws ConflictException if there was a conflict introduced by the * given update operation. */ private void createOrUpdateNode(DocumentStore store, UpdateOp op) throws ConflictException, DocumentStoreException { NodeDocument doc = store.createOrUpdate(NODES, op); checkConflicts(op, doc); checkSplitCandidate(doc); } private void checkSplitCandidate(Iterable<NodeDocument> docs) { for (NodeDocument doc : docs) { checkSplitCandidate(doc); } } private void checkSplitCandidate(@Nullable NodeDocument doc) { if (doc == null) { return; } if (doc.getMemory() > SPLIT_CANDIDATE_THRESHOLD || doc.hasBinary()) { nodeStore.addSplitCandidate(doc.getId()); } } /** * Checks if the update operation introduced any conflicts on the given * document. The document shows the state right before the operation was * applied. * * @param op the update operation. * @param before how the document looked before the update was applied or * {@code null} if it didn't exist before. * @throws ConflictException if there was a conflict introduced by the * given update operation. 
*/ private void checkConflicts(@NotNull UpdateOp op, @Nullable NodeDocument before) throws ConflictException { DocumentStore store = nodeStore.getDocumentStore(); collisions.clear(); if (baseRevision != null) { Revision newestRev = null; Branch branch = null; if (before != null) { RevisionVector base = baseRevision; if (nodeStore.isDisableBranches()) { base = base.asTrunkRevision(); } branch = getBranch(); newestRev = before.getNewestRevision( nodeStore, base, revision, branch, collisions); } String conflictMessage = null; Set<Revision> conflictRevisions = new HashSet<>(); if (newestRev == null) { if ((op.isDelete() || !op.isNew()) && !allowConcurrentAddRemove(before, op)) { conflictMessage = "The node " + op.getId() + " does not exist or is already deleted " + "at base revision " + baseRevision + ", branch: " + branch; if (before != null && !before.getLocalDeleted().isEmpty()) { conflictRevisions.add(before.getLocalDeleted().firstKey()); } } } else { conflictRevisions.add(newestRev); if (op.isNew() && !allowConcurrentAddRemove(before, op)) { conflictMessage = "The node " + op.getId() + " already existed in revision\n" + formatConflictRevision(newestRev); } else if (baseRevision.isRevisionNewer(newestRev) && (op.isDelete() || isConflicting(before, op))) { conflictMessage = "The node " + op.getId() + " was changed in revision\n" + formatConflictRevision(newestRev) + ", which was applied after the base revision\n" + baseRevision; } } if (conflictMessage == null && before != null) { // the modification was successful // -> check for collisions and conflict (concurrent updates // on a node are possible if property updates do not overlap) // TODO: unify above conflict detection and isConflicting() boolean allowConflictingDeleteChange = allowConcurrentAddRemove(before, op); for (Revision r : collisions) { Collision c = new Collision(before, r, op, revision, nodeStore, startRevisions); if (c.isConflicting() && !allowConflictingDeleteChange) { // mark collisions on commit 
root if (c.mark(store).equals(revision)) { // our revision was marked if (baseRevision.isBranch()) { // this is a branch commit. do not fail immediately // merging this branch will fail later. } else { // fail immediately conflictMessage = "The node " + op.getId() + " was changed in revision\n" + formatConflictRevision(r) + ", which was applied after the base revision\n" + baseRevision; conflictRevisions.add(r); } } } } } if (conflictMessage != null) { conflictMessage += ", commit revision: " + revision; if (LOG.isDebugEnabled()) { LOG.debug(conflictMessage + "; document:\n" + (before == null ? "" : before.format())); } throw new ConflictException(conflictMessage, conflictRevisions); } } } private void checkConflicts(List<NodeDocument> oldDocs, List<UpdateOp> updates) throws ConflictException { int i = 0; List<ConflictException> exceptions = new ArrayList<ConflictException>(); Set<Revision> revisions = new HashSet<Revision>(); for (NodeDocument doc : oldDocs) { UpdateOp op = updates.get(i++); try { checkConflicts(op, doc); } catch (ConflictException e) { exceptions.add(e); e.getConflictRevisions().forEach(revisions::add); } } if (!exceptions.isEmpty()) { throw new ConflictException("Following exceptions occurred during the bulk update operations: " + exceptions, revisions); } } private String formatConflictRevision(Revision r) { if (nodeStore.getHeadRevision().isRevisionNewer(r)) { return r + " (not yet visible)"; } else if (baseRevision != null && !baseRevision.isRevisionNewer(r) && !Objects.equals(baseRevision.getRevision(r.getClusterId()), r)) { return r + " (older than base " + baseRevision + ")"; } else { return r.toString(); } } /** * Checks whether the given <code>UpdateOp</code> conflicts with the * existing content in <code>doc</code>. The check is done based on the * {@link #baseRevision} of this commit. An <code>UpdateOp</code> conflicts * when there were changes after {@link #baseRevision} on properties also * contained in <code>UpdateOp</code>. 
* * @param doc the contents of the nodes before the update. * @param op the update to perform. * @return <code>true</code> if the update conflicts; <code>false</code> * otherwise. */ private boolean isConflicting(@Nullable NodeDocument doc, @NotNull UpdateOp op) { if (baseRevision == null || doc == null) { // no conflict is possible when there is no baseRevision // or document did not exist before return false; } return doc.isConflicting(op, baseRevision, revision, nodeStore.getEnableConcurrentAddRemove()); } /** * Checks whether a concurrent add/remove operation is allowed with the * given before document and update operation. This method will first check * if the concurrent add/remove feature is enable and return {@code false} * immediately if it is disabled. Only when enabled will this method check * if there is a conflict based on the given document and update operation. * See also {@link #isConflicting(NodeDocument, UpdateOp)}. * * @param before the contents of the document before the update. * @param op the update to perform. * @return {@code true} is a concurrent add/remove update is allowed; * {@code false} otherwise. */ private boolean allowConcurrentAddRemove(@Nullable NodeDocument before, @NotNull UpdateOp op) { return nodeStore.getEnableConcurrentAddRemove() && !isConflicting(before, op); } /** * @return the branch if this is a branch commit, otherwise {@code null}. */ @Nullable private Branch getBranch() { if (baseRevision == null || !baseRevision.isBranch()) { return null; } if (b == null) { b = nodeStore.getBranches().getBranch( new RevisionVector(revision.asBranchRevision())); } return b; } /** * @return {@code true} if this is a branch commit. */ private boolean isBranchCommit() { return baseRevision != null && baseRevision.isBranch(); } /** * Applies the lastRev updates to the {@link LastRevTracker} of the * DocumentNodeStore. * * @param isBranchCommit whether this is a branch commit. 
*/ void applyLastRevUpdates(boolean isBranchCommit) { LastRevTracker tracker = nodeStore.createTracker(revision, isBranchCommit); for (Path path : modifiedNodes) { UpdateOp op = operations.get(path); // track _lastRev only when path is not for a bundled node state if ((op == null || !hasContentChanges(op) || path.isRoot()) && !isBundled(path)) { // track intermediate node and root tracker.track(path); } } } /** * Apply the changes to the DocumentNodeStore (to update the cache). * * @param before the revision right before this commit. * @param isBranchCommit whether this is a commit to a branch */ public void applyToCache(RevisionVector before, boolean isBranchCommit) { HashMap<Path, ArrayList<Path>> nodesWithChangedChildren = new HashMap<>(); for (Path p : modifiedNodes) { if (p.isRoot()) { continue; } Path parent = p.getParent(); ArrayList<Path> list = nodesWithChangedChildren .computeIfAbsent(parent, k -> new ArrayList<>()); list.add(p); } // the commit revision with branch flag if this is a branch commit Revision rev = isBranchCommit ? 
revision.asBranchRevision() : revision; RevisionVector after = before.update(rev); DiffCache.Entry cacheEntry = nodeStore.getDiffCache().newEntry(before, after, true); List<Path> added = new ArrayList<>(); List<Path> removed = new ArrayList<>(); List<Path> changed = new ArrayList<>(); for (Path path : modifiedNodes) { added.clear(); removed.clear(); changed.clear(); ArrayList<Path> changes = nodesWithChangedChildren.get(path); if (changes != null) { for (Path s : changes) { if (addedNodes.contains(s)) { added.add(s); } else if (removedNodes.contains(s)) { removed.add(s); } else { changed.add(s); } } } UpdateOp op = operations.get(path); // apply to cache only when path is not for a bundled node state if (!isBundled(path)) { boolean isNew = op != null && op.isNew(); nodeStore.applyChanges(before, after, rev, path, isNew, added, removed, changed); } addChangesToDiffCacheEntry(path, added, removed, changed, cacheEntry); } cacheEntry.done(); } void markChanged(Path path) { while (true) { if (!modifiedNodes.add(path)) { break; } path = path.getParent(); if (path == null) { break; } } } @NotNull RevisionVector getStartRevisions() { return startRevisions; } /** * Apply the changes of a node to the cache. 
* * @param path the path * @param added the list of added child nodes * @param removed the list of removed child nodes * @param changed the list of changed child nodes * @param cacheEntry the cache entry changes are added to */ private void addChangesToDiffCacheEntry(Path path, List<Path> added, List<Path> removed, List<Path> changed, DiffCache.Entry cacheEntry) { // update diff cache JsopWriter w = new JsopStream(); for (Path p : added) { w.tag('+').key(p.getName()).object().endObject(); } for (Path p : removed) { w.tag('-').value(p.getName()); } for (Path p : changed) { w.tag('^').key(p.getName()).object().endObject(); } cacheEntry.append(path, w.toString()); } private boolean isBundled(Path path) { return bundledNodes.containsKey(path); } private static boolean hasContentChanges(UpdateOp op) { return IterableUtils.filter(IterableUtils.transform(op.getChanges().keySet(), Key::getName), Utils.PROPERTY_OR_DELETED::test).iterator().hasNext(); } }
apache/juneau
35,384
juneau-core/juneau-assertions/src/main/java/org/apache/juneau/assertions/Assertions.java
// *************************************************************************************************************************** // * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file * // * distributed with this work for additional information regarding copyright ownership. The ASF licenses this file * // * to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance * // * with the License. You may obtain a copy of the License at * // * * // * http://www.apache.org/licenses/LICENSE-2.0 * // * * // * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an * // * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * // * specific language governing permissions and limitations under the License. * // *************************************************************************************************************************** package org.apache.juneau.assertions; import static org.apache.juneau.common.utils.IOUtils.*; import java.io.*; import java.time.*; import java.util.*; import java.util.stream.*; import org.apache.juneau.*; import org.apache.juneau.common.utils.*; /** * Main class for creation of assertions for stand-alone testing. * * <p> * Provides assertions for various common POJO types. * * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jk>import static</jk> org.apache.juneau.assertions.Assertions.*; * * <jc>// Assert string is greater than 100 characters and contains "foo".</jc> * <jsm>assertString</jsm>(<jv>myString</jv>) * .length().isGt(100) * .contains(<js>"foo"</js>); * </p> * * <p> * Provides simple testing that {@link Throwable Throwables} are being thrown correctly. 
* * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jk>import static</jk> org.apache.juneau.assertions.Assertions.*; * * <jc>// Assert that calling doBadCall() causes a RuntimeException.</jc> * <jsm>assertThrown</jsm>(() -&gt; <jv>myPojo</jv>.doBadCall()) * .isType(RuntimeException.<jk>class</jk>) * .message().contains(<js>"Bad thing happened."</js>); * </p> * * <p> * Provides other assertion convenience methods such as asserting non-null method arguments. * * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jk>import static</jk> org.apache.juneau.assertions.Assertions.*; * * <jk>public</jk> String getFoo(String <jv>bar</jv>) { * <jsm>assertArgNotNull</jsm>(<js>"bar"</js>, <jv>bar</jv>); * ... * } * </p> * * <h5 class='section'>See Also:</h5> * <ul> * <li class='link'><a class="doclink" href="https://juneau.apache.org/docs/topics/JuneauEcosystemOverview">Juneau Ecosystem Overview</a> * </ul> */ public class Assertions { /** * Constructor. */ protected Assertions() {} //----------------------------------------------------------------------------------------------------------------- // Fluent assertions //----------------------------------------------------------------------------------------------------------------- /** * Performs an assertion on an arbitrary POJO. * * <p> * The distinction between {@link ObjectAssertion} and {@link AnyAssertion} is that the latter supports all * the operations of the former, but adds various transform methods for conversion to specific assertion types. * * <p> * Various transform methods such as {@link FluentListAssertion#asItem(int)} and {@link FluentBeanAssertion#asProperty(String)} * return generic any-assertions so that they can be easily transformed into other assertion types. 
* * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jk>import static</jk> org.apache.juneau.assertions.Assertions.*; * * <jc>// Asserts that the property 'foo' of a bean is 'bar'.</jc> * <jsm>assertAny</jsm>(<jv>myPojo</jv>) <jc>// Start with AnyAssertion.</jc> * .asBean(MyBean.<jk>class</jk>) <jc>// Transform to BeanAssertion.</jc> * .property(<js>"foo"</js>).is(<js>"bar"</js>); * </p> * * <p> * See <a class="doclink" href="https://juneau.apache.org/docs/topics/JuneauEcosystemOverview">Juneau Ecosystem Overview</a> for general assertion usage and {@link AnyAssertion} for supported operations on this type. * * @param <T> The value type. * @param value * The object being tested. * <br>Can be <jk>null</jk>. * @return * A new assertion object. * <br>Never <jk>null</jk>. */ public static final <T> AnyAssertion<T> assertAny(T value) { return AnyAssertion.create(value); } /** * Performs an assertion on an array of POJOs. * * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jk>import static</jk> org.apache.juneau.assertions.Assertions.*; * * <jc>// Asserts that an Integer array contains [1,2,3].</jc> * Integer[] <jv>array</jv> = {...}; * <jsm>assertArray</jsm>(<jv>array</jv>) * .asJson().is(<js>"[1,2,3]"</js>); * </p> * * <p> * See <a class="doclink" href="https://juneau.apache.org/docs/topics/JuneauEcosystemOverview">Juneau Ecosystem Overview</a> for general assertion usage and {@link ArrayAssertion} for supported operations on this type. * * @param <E> The value element type. * @param value * The object being tested. * <br>Can be <jk>null</jk>. * @return * A new assertion object. * <br>Never <jk>null</jk>. */ public static final <E> ArrayAssertion<E> assertArray(E[] value) { return ArrayAssertion.create(value); } /** * Performs an assertion on a Java bean. 
* * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jk>import static</jk> org.apache.juneau.assertions.Assertions.*; * * <jc>// Asserts that the 'foo' and 'bar' properties of a bean are 1 and 2 respectively.</jc> * <jsm>assertBean</jsm>(<jv>myBean</jv>) * .isType(MyBean.<jk>class</jk>) * .extract(<js>"foo,bar"</js>) * .asJson().is(<js>"{foo:1,bar:2}"</js>); * </p> * * <p> * See <a class="doclink" href="https://juneau.apache.org/docs/topics/JuneauEcosystemOverview">Juneau Ecosystem Overview</a> for general assertion usage and {@link BeanAssertion} for supported operations on this type. * * @param <T> The value type. * @param value * The object being tested. * <br>Can be <jk>null</jk>. * @return * A new assertion object. * <br>Never <jk>null</jk>. */ public static final <T> BeanAssertion<T> assertBean(T value) { return BeanAssertion.create(value); } /** * Performs an assertion on a list of Java beans. * * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jk>import static</jk> org.apache.juneau.assertions.Assertions.*; * * <jc>// Asserts that a bean list has 3 entries with 'foo' property values of 'bar','baz','qux'.</jc> * <jsm>assertBeanList</jsm>(<jv>myListOfBeans</jv>) * .isSize(3) * .property(<js>"foo"</js>) * .is(<js>"bar"</js>,<js>"baz"</js>,<js>"qux"</js>); * </p> * * <p> * See <a class="doclink" href="https://juneau.apache.org/docs/topics/JuneauEcosystemOverview">Juneau Ecosystem Overview</a> for general assertion usage and {@link BeanListAssertion} for supported operations on this type. * * @param <E> The element type. * @param value * The object being tested. * <br>Can be <jk>null</jk>. * @return * A new assertion object. * <br>Never <jk>null</jk>. */ public static final <E> BeanListAssertion<E> assertBeanList(List<E> value) { return BeanListAssertion.create(value); } /** * Performs an assertion on a Boolean. 
* * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jk>import static</jk> org.apache.juneau.assertions.Assertions.*; * * <jc>// Asserts that a Boolean is not null and TRUE.</jc> * <jsm>assertBoolean</jsm>(<jv>myBoolean</jv>) * .isTrue(); * </p> * * <p> * See <a class="doclink" href="https://juneau.apache.org/docs/topics/JuneauEcosystemOverview">Juneau Ecosystem Overview</a> for general assertion usage and {@link BooleanAssertion} for supported operations on this type. * * @param value * The object being tested. * <br>Can be <jk>null</jk>. * @return * A new assertion object. * <br>Never <jk>null</jk>. */ public static final BooleanAssertion assertBoolean(Boolean value) { return BooleanAssertion.create(value); } /** * Performs an assertion on a boolean array. * * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jk>import static</jk> org.apache.juneau.assertions.Assertions.*; * * <jc>// Asserts that a Boolean array has size of 3 and all entries are TRUE.</jc> * <jsm>assertBooleanArray</jsm>(<jv>myBooleanArray</jv>) * .isSize(3) * .all(<jv>x</jv> -&gt; <jv>x</jv> == <jk>true</jk>); * </p> * * <p> * See <a class="doclink" href="https://juneau.apache.org/docs/topics/JuneauEcosystemOverview">Juneau Ecosystem Overview</a> for general assertion usage and {@link PrimitiveArrayAssertion} for supported operations on this type. * * @param value * The object being tested. * <br>Can be <jk>null</jk>. * @return * A new assertion object. * <br>Never <jk>null</jk>. */ public static final PrimitiveArrayAssertion<Boolean,boolean[]> assertBooleanArray(boolean[] value) { return PrimitiveArrayAssertion.create(value); } /** * Performs an assertion on a byte array. * * <p> * The distinction between {@link #assertByteArray} and {@link #assertBytes} is that the former returns an assertion * more tied to general byte arrays and the latter returns an assertion more tied to dealing with binary streams * that can be decoded or transformed into a string. 
* * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jk>import static</jk> org.apache.juneau.assertions.Assertions.*; * * <jc>// Asserts that a byte array has size of 3 and all bytes are larger than 10.</jc> * <jsm>assertByteArray</jsm>(<jv>myByteArray</jv>) * .isSize(3) * .all(<jv>x</jv> -&gt; <jv>x</jv> &gt; 10); * </p> * * <p> * See <a class="doclink" href="https://juneau.apache.org/docs/topics/JuneauEcosystemOverview">Juneau Ecosystem Overview</a> for general assertion usage and {@link PrimitiveArrayAssertion} for supported operations on this type. * * @param value * The object being tested. * <br>Can be <jk>null</jk>. * @return * A new assertion object. * <br>Never <jk>null</jk>. */ public static final PrimitiveArrayAssertion<Byte,byte[]> assertByteArray(byte[] value) { return PrimitiveArrayAssertion.create(value); } /** * Performs an assertion on a byte array. * * <p> * The distinction between {@link #assertByteArray} and {@link #assertBytes} is that the former returns an assertion * more tied to general byte arrays and the latter returns an assertion more tied to dealing with binary streams * that can be decoded or transformed into a string. * * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jk>import static</jk> org.apache.juneau.assertions.Assertions.*; * * <jc>// Asserts that the byte array contains the string "foo".</jc> * <jsm>assertBytes</jsm>(<jv>myBytes</jv>) * .asHex().is(<js>"666F6F"</js>); * </p> * * <p> * See <a class="doclink" href="https://juneau.apache.org/docs/topics/JuneauEcosystemOverview">Juneau Ecosystem Overview</a> for general assertion usage and {@link ByteArrayAssertion} for supported operations on this type. * * @param value * The object being tested. * <br>Can be <jk>null</jk>. * @return * A new assertion object. * <br>Never <jk>null</jk>. */ public static final ByteArrayAssertion assertBytes(byte[] value) { return ByteArrayAssertion.create(value); } /** * Performs an assertion on the contents of an input stream. 
* * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jk>import static</jk> org.apache.juneau.assertions.Assertions.*; * * <jc>// Asserts that the stream contains the string "foo".</jc> * <jsm>assertBytes</jsm>(<jv>myStream</jv>) * .asHex().is(<js>"666F6F"</js>); * </p> * * <p> * See <a class="doclink" href="https://juneau.apache.org/docs/topics/JuneauEcosystemOverview">Juneau Ecosystem Overview</a> for general assertion usage and {@link ByteArrayAssertion} for supported operations on this type. * * @param value * The object being tested. * <br>Can be <jk>null</jk>. * <br>Stream is automatically closed. * @return * A new assertion object. * <br>Never <jk>null</jk>. * @throws IOException If thrown while reading contents from stream. */ public static final ByteArrayAssertion assertBytes(InputStream value) throws IOException { return assertBytes(value == null ? null : readBytes(value)); } /** * Performs an assertion on a char array. * * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jk>import static</jk> org.apache.juneau.assertions.Assertions.*; * * <jc>// Asserts that the char array contains the string "foo".</jc> * <jsm>assertCharArray</jsm>(<jv>myCharArray</jv>) * .asString().is(<js>"foo"</js>); * </p> * * <p> * See <a class="doclink" href="https://juneau.apache.org/docs/topics/JuneauEcosystemOverview">Juneau Ecosystem Overview</a> for general assertion usage and {@link PrimitiveArrayAssertion} for supported operations on this type. * * @param value * The object being tested. * <br>Can be <jk>null</jk>. * @return * A new assertion object. * <br>Never <jk>null</jk>. */ public static final PrimitiveArrayAssertion<Character,char[]> assertCharArray(char[] value) { return PrimitiveArrayAssertion.create(value); } /** * Performs an assertion on a collection of POJOs. 
* * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jk>import static</jk> org.apache.juneau.assertions.Assertions.*; * * <jc>// Asserts that a collection of strings has only one entry of 'foo'.</jc> * <jsm>assertCollection</jsm>(<jv>myCollectionOfStrings</jv>) * .isSize(1) * .contains(<js>"foo"</js>); * </p> * * <p> * In general, use {@link #assertList(List)} if you're performing an assertion on a list since {@link ListAssertion} * provides more functionality than {@link CollectionAssertion}. * * <p> * See <a class="doclink" href="https://juneau.apache.org/docs/topics/JuneauEcosystemOverview">Juneau Ecosystem Overview</a> for general assertion usage and {@link CollectionAssertion} for supported operations on this type. * * @param <E> The element type. * @param value * The object being tested. * <br>Can be <jk>null</jk>. * @return * A new assertion object. * <br>Never <jk>null</jk>. */ public static final <E> CollectionAssertion<E> assertCollection(Collection<E> value) { return CollectionAssertion.create(value); } /** * Performs an assertion on a Comparable. * * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jk>import static</jk> org.apache.juneau.assertions.Assertions.*; * * <jc>// Asserts a comparable is less than another comparable.</jc> * <jsm>assertComparable</jsm>(<jv>myComparable</jv>) * .isLt(<jv>anotherComparable</jv>); * </p> * * <p> * See <a class="doclink" href="https://juneau.apache.org/docs/topics/JuneauEcosystemOverview">Juneau Ecosystem Overview</a> for general assertion usage and {@link ComparableAssertion} for supported operations on this type. * * @param <T> The value type. * @param value * The object being tested. * <br>Can be <jk>null</jk>. * @return * A new assertion object. * <br>Never <jk>null</jk>. */ public static final <T extends Comparable<T>> ComparableAssertion<T> assertComparable(T value) { return ComparableAssertion.create(value); } /** * Performs an assertion on a Date. 
* * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jk>import static</jk> org.apache.juneau.assertions.Assertions.*; * * <jc>// Asserts the specified date is after the current date.</jc> * <jsm>assertDate</jsm>(<jv>myDate</jv>) * .isAfterNow(); * </p> * * <p> * See <a class="doclink" href="https://juneau.apache.org/docs/topics/JuneauEcosystemOverview">Juneau Ecosystem Overview</a> for general assertion usage and {@link DateAssertion} for supported operations on this type. * * @param value * The object being tested. * <br>Can be <jk>null</jk>. * @return * A new assertion object. * <br>Never <jk>null</jk>. */ public static final DateAssertion assertDate(Date value) { return DateAssertion.create(value); } /** * Performs an assertion on a double array. * * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jk>import static</jk> org.apache.juneau.assertions.Assertions.*; * * <jc>// Asserts that a double array is at least size 100 and all values are greater than 1000.</jc> * <jsm>assertDoubleArray</jsm>(<jv>myDoubleArray</jv>) * .size().isGte(100f) * .all(<jv>x</jv> -&gt; <jv>x</jv> &gt; 1000f); * </p> * * <p> * See <a class="doclink" href="https://juneau.apache.org/docs/topics/JuneauEcosystemOverview">Juneau Ecosystem Overview</a> for general assertion usage and {@link PrimitiveArrayAssertion} for supported operations on this type. * * @param value * The object being tested. * <br>Can be <jk>null</jk>. * @return * A new assertion object. * <br>Never <jk>null</jk>. */ public static final PrimitiveArrayAssertion<Double,double[]> assertDoubleArray(double[] value) { return PrimitiveArrayAssertion.create(value); } /** * Performs an assertion on a float array. 
* * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jk>import static</jk> org.apache.juneau.assertions.Assertions.*; * * <jc>// Asserts that a float array is at least size 100 and all values are greater than 1000.</jc> * <jsm>assertFloatArray</jsm>(<jv>myFloatArray</jv>) * .size().isGte(100f) * .all(<jv>x</jv> -&gt; <jv>x</jv> &gt; 1000f); * </p> * * <p> * See <a class="doclink" href="https://juneau.apache.org/docs/topics/JuneauEcosystemOverview">Juneau Ecosystem Overview</a> for general assertion usage and {@link PrimitiveArrayAssertion} for supported operations on this type. * * @param value * The object being tested. * <br>Can be <jk>null</jk>. * @return * A new assertion object. * <br>Never <jk>null</jk>. */ public static final PrimitiveArrayAssertion<Float,float[]> assertFloatArray(float[] value) { return PrimitiveArrayAssertion.create(value); } /** * Performs an assertion on an int array. * * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jk>import static</jk> org.apache.juneau.assertions.Assertions.*; * * <jc>// Asserts that a double array is at least size 100 and all values are greater than 1000.</jc> * <jsm>assertIntArray</jsm>(<jv>myIntArray</jv>) * .size().isGte(100) * .all(<jv>x</jv> -&gt; <jv>x</jv> &gt; 1000); * </p> * * <p> * See <a class="doclink" href="https://juneau.apache.org/docs/topics/JuneauEcosystemOverview">Juneau Ecosystem Overview</a> for general assertion usage and {@link PrimitiveArrayAssertion} for supported operations on this type. * * @param value * The object being tested. * <br>Can be <jk>null</jk>. * @return * A new assertion object. * <br>Never <jk>null</jk>. */ public static final PrimitiveArrayAssertion<Integer,int[]> assertIntArray(int[] value) { return PrimitiveArrayAssertion.create(value); } /** * Performs an assertion on an Integer. 
* * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jk>import static</jk> org.apache.juneau.assertions.Assertions.*; * * <jc>// Assert that an HTTP response status code is 200 or 404.</jc> * <jsm>assertInteger</jsm>(<jv>httpReponse</jv>) * .isAny(200,404); * </p> * * <p> * See <a class="doclink" href="https://juneau.apache.org/docs/topics/JuneauEcosystemOverview">Juneau Ecosystem Overview</a> for general assertion usage and {@link IntegerAssertion} for supported operations on this type. * * @param value * The object being tested. * <br>Can be <jk>null</jk>. * @return * A new assertion object. * <br>Never <jk>null</jk>. */ public static final IntegerAssertion assertInteger(Integer value) { return IntegerAssertion.create(value); } /** * Performs an assertion on a list of POJOs. * * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jk>import static</jk> org.apache.juneau.assertions.Assertions.*; * * <jc>// Assert that the first entry in a list is "{foo:'bar'}" when serialized to simplified JSON.</jc> * <jsm>assertList</jsm>(<jv>myList</jv>) * .item(0) * .asJson().is(<js>"{foo:'bar'}"</js>); * </p> * * <p> * See <a class="doclink" href="https://juneau.apache.org/docs/topics/JuneauEcosystemOverview">Juneau Ecosystem Overview</a> for general assertion usage and {@link ListAssertion} for supported operations on this type. * * @param <E> The element type. * @param value * The object being tested. * <br>Can be <jk>null</jk>. * @return * A new assertion object. * <br>Never <jk>null</jk>. */ public static final <E> ListAssertion<E> assertList(List<E> value) { return ListAssertion.create(value); } /** * Performs an assertion on a stream of POJOs. 
* * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jk>import static</jk> org.apache.juneau.assertions.Assertions.*; * * <jc>// Assert that the first entry in a list is "{foo:'bar'}" when serialized to simplified JSON.</jc> * <jsm>assertList</jsm>(<jv>myStream</jv>) * .item(0) * .asJson().is(<js>"{foo:'bar'}"</js>); * </p> * * <p> * See <a class="doclink" href="https://juneau.apache.org/docs/topics/JuneauEcosystemOverview">Juneau Ecosystem Overview</a> for general assertion usage and {@link ListAssertion} for supported operations on this type. * * @param <E> The element type. * @param value * The object being tested. * <br>Can be <jk>null</jk>. * @return * A new assertion object. * <br>Never <jk>null</jk>. */ public static final <E> ListAssertion<E> assertList(Stream<E> value) { return ListAssertion.create(value); } /** * Performs an assertion on a Long. * * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jk>import static</jk> org.apache.juneau.assertions.Assertions.*; * * <jc>// Throw a BadReqest if an HTTP response length is greater than 100k.</jc> * <jsm>assertLong</jsm>(<jv>responseLength</jv>) * .throwable(BadRequest.<jk>class</jk>) * .msg(<js>"Request is too large"</js>) * .isLt(100000); * </p> * * <p> * See <a class="doclink" href="https://juneau.apache.org/docs/topics/JuneauEcosystemOverview">Juneau Ecosystem Overview</a> for general assertion usage and {@link LongAssertion} for supported operations on this type. * * @param value * The object being tested. * <br>Can be <jk>null</jk>. * @return * A new assertion object. * <br>Never <jk>null</jk>. */ public static final LongAssertion assertLong(Long value) { return LongAssertion.create(value); } /** * Performs an assertion on a long array. 
* * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jk>import static</jk> org.apache.juneau.assertions.Assertions.*; * * <jc>// Asserts that a long array is at least size 100 and all values are greater than 1000.</jc> * <jsm>assertLongArray</jsm>(<jv>myLongArray</jv>) * .size().isGte(100) * .all(<jv>x</jv> -&gt; <jv>x</jv> &gt; 1000); * </p> * * <p> * See <a class="doclink" href="https://juneau.apache.org/docs/topics/JuneauEcosystemOverview">Juneau Ecosystem Overview</a> for general assertion usage and {@link PrimitiveArrayAssertion} for supported operations on this type. * * @param value * The object being tested. * <br>Can be <jk>null</jk>. * @return * A new assertion object. * <br>Never <jk>null</jk>. */ public static final PrimitiveArrayAssertion<Long,long[]> assertLongArray(long[] value) { return PrimitiveArrayAssertion.create(value); } /** * Performs an assertion on a map. * * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jk>import static</jk> org.apache.juneau.assertions.Assertions.*; * * <jc>// Assert the specified map is a HashMap and contains the key "foo".</jc> * <jsm>assertMap</jsm>(<jv>myMap</jv>) * .isType(HashMap.<jk>class</jk>) * .containsKey(<js>"foo"</js>); * </p> * * <p> * See <a class="doclink" href="https://juneau.apache.org/docs/topics/JuneauEcosystemOverview">Juneau Ecosystem Overview</a> for general assertion usage and {@link MapAssertion} for supported operations on this type. * * @param <K> The key type. * @param <V> The value type. * @param value * The object being tested. * <br>Can be <jk>null</jk>. * @return * A new assertion object. * <br>Never <jk>null</jk>. */ public static final <K,V> MapAssertion<K,V> assertMap(Map<K,V> value) { return MapAssertion.create(value); } /** * Performs an assertion on a Java Object. 
* * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jk>import static</jk> org.apache.juneau.assertions.Assertions.*; * * <jc>// Asserts the specified POJO is of type MyBean and is "{foo:'bar'}" </jc> * <jc>// when serialized to Simplified JSON.</jc> * <jsm>assertObject</jsm>(<jv>myPojo</jv>) * .isType(MyBean.<jk>class</jk>) * .asJson().is(<js>"{foo:'bar'}"</js>); * </p> * * <p> * See <a class="doclink" href="https://juneau.apache.org/docs/topics/JuneauEcosystemOverview">Juneau Ecosystem Overview</a> for general assertion usage and {@link ObjectAssertion} for supported operations on this type. * * @param <T> The value type. * @param value * The object being tested. * <br>Can be <jk>null</jk>. * @return * A new assertion object. * <br>Never <jk>null</jk>. */ public static final <T> ObjectAssertion<T> assertObject(T value) { return ObjectAssertion.create(value); } /** * Performs an assertion on a Java Object wrapped in an Optional. * * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jk>import static</jk> org.apache.juneau.assertions.Assertions.*; * * <jc>// Asserts the specified POJO is of type MyBean and is "{foo:'bar'}" </jc> * <jc>// when serialized to Simplified JSON.</jc> * <jsm>assertOptional</jsm>(<jv>opt</jv>) * .isType(MyBean.<jk>class</jk>) * .asJson().is(<js>"{foo:'bar'}"</js>); * </p> * * <p> * See <a class="doclink" href="https://juneau.apache.org/docs/topics/JuneauEcosystemOverview">Juneau Ecosystem Overview</a> for general assertion usage and {@link AnyAssertion} for supported operations on this type. * * @param <T> The value type. * @param value * The object being tested. * <br>Can be <jk>null</jk>. * @return * A new assertion object. * <br>Never <jk>null</jk>. */ public static final <T> AnyAssertion<T> assertOptional(Optional<T> value) { return AnyAssertion.create(value.orElse(null)); } /** * Performs an assertion on the contents of a Reader. 
* * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jk>import static</jk> org.apache.juneau.assertions.Assertions.*; * * <jc>// Asserts the contents of the Reader contains "foo".</jc> * <jsm>assertReader</jsm>(<jv>myReader</jv>) * .contains(<js>"foo"</js>); * </p> * * <p> * See <a class="doclink" href="https://juneau.apache.org/docs/topics/JuneauEcosystemOverview">Juneau Ecosystem Overview</a> for general assertion usage and {@link StringAssertion} for supported operations on this type. * * @param value * The object being tested. * <br>Can be <jk>null</jk>. * <br>Reader is automatically closed. * @return * A new assertion object. * <br>Never <jk>null</jk>. * @throws IOException If thrown while reading contents from reader. */ public static final StringAssertion assertReader(Reader value) throws IOException { return assertString(read(value)); } /** * Performs an assertion on a short array. * * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jk>import static</jk> org.apache.juneau.assertions.Assertions.*; * * <jc>// Asserts that a float array is at least size 10 and all values are greater than 100.</jc> * <jsm>assertShortArray</jsm>(<jv>myShortArray</jv>) * .size().isGte(10) * .all(<jv>x</jv> -&gt; <jv>x</jv> &gt; 100); * </p> * * <p> * See <a class="doclink" href="https://juneau.apache.org/docs/topics/JuneauEcosystemOverview">Juneau Ecosystem Overview</a> for general assertion usage and {@link PrimitiveArrayAssertion} for supported operations on this type. * * @param value * The object being tested. * <br>Can be <jk>null</jk>. * @return * A new assertion object. * <br>Never <jk>null</jk>. */ public static final PrimitiveArrayAssertion<Short,short[]> assertShortArray(short[] value) { return PrimitiveArrayAssertion.create(value); } /** * Performs an assertion on a String. 
* * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jk>import static</jk> org.apache.juneau.assertions.Assertions.*; * * <jc>// Asserts a string is at least 100 characters long and contains "foo".</jc> * <jsm>assertString</jsm>(<jv>myString</jv>) * .size().isGte(100) * .contains(<js>"foo"</js>); * </p> * * <p> * See <a class="doclink" href="https://juneau.apache.org/docs/topics/JuneauEcosystemOverview">Juneau Ecosystem Overview</a> for general assertion usage and {@link StringAssertion} for supported operations on this type. * * @param value * The object being tested. * <br>Can be <jk>null</jk>. * @return * A new assertion object. * <br>Never <jk>null</jk>. */ public static final StringAssertion assertString(Object value) { if (value instanceof Optional) value = ((Optional<?>)value).orElse(null); return StringAssertion.create(value); } /** * Performs an assertion on a list of Strings. * * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jk>import static</jk> org.apache.juneau.assertions.Assertions.*; * * <jc>// Asserts a list of strings contain "foo,bar,baz" after trimming all and joining.</jc> * <jsm>assertStringList</jsm>(<jv>myListOfStrings</jv>) * .isSize(3) * .trim() * .join(<js>","</js>) * .is(<js>"foo,bar,baz"</js>); * </p> * * <p> * See <a class="doclink" href="https://juneau.apache.org/docs/topics/JuneauEcosystemOverview">Juneau Ecosystem Overview</a> for general assertion usage and {@link StringListAssertion} for supported operations on this type. * * @param value * The object being tested. * <br>Can be <jk>null</jk>. * @return * A new assertion object. * <br>Never <jk>null</jk>. */ public static final StringListAssertion assertStringList(List<String> value) { return StringListAssertion.create(value); } /** * Performs an assertion on a Throwable. 
* * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jk>import static</jk> org.apache.juneau.assertions.Assertions.*; * * <jc>// Asserts a throwable is a RuntimeException containing 'foobar' in the message.</jc> * <jsm>assertThrowable</jsm>(<jv>throwable</jv>) * .isExactType(RuntimeException.<jk>class</jk>) * .message().contains(<js>"foobar"</js>); * </p> * * <p> * See <a class="doclink" href="https://juneau.apache.org/docs/topics/JuneauEcosystemOverview">Juneau Ecosystem Overview</a> for general assertion usage and {@link ThrowableAssertion} for supported operations on this type. * * @param <T> The value type. * @param value * The object being tested. * <br>Can be <jk>null</jk>. * @return * A new assertion object. * <br>Never <jk>null</jk>. */ public static final <T extends Throwable> ThrowableAssertion<T> assertThrowable(T value) { return ThrowableAssertion.create(value); } /** * Performs an assertion on a Version. * * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jk>import static</jk> org.apache.juneau.assertions.Assertions.*; * * <jc>// Asserts the specified major version is at least 2.</jc> * <jsm>assertVersion</jsm>(<jv>version</jv>) * .major().isGte(2); * </p> * * <p> * See <a class="doclink" href="https://juneau.apache.org/docs/topics/JuneauEcosystemOverview">Juneau Ecosystem Overview</a> for general assertion usage and {@link VersionAssertion} for supported operations on this type. * * @param value * The object being tested. * <br>Can be <jk>null</jk>. * @return * A new assertion object. * <br>Never <jk>null</jk>. */ public static final VersionAssertion assertVersion(Version value) { return VersionAssertion.create(value); } /** * Performs an assertion on a ZonedDateTime. 
* * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jk>import static</jk> org.apache.juneau.assertions.Assertions.*; * * <jc>// Asserts the specified date is after the current date.</jc> * <jsm>assertZonedDateTime</jsm>(<jv>myZonedDateTime</jv>) * .isAfterNow(); * </p> * * <p> * See <a class="doclink" href="https://juneau.apache.org/docs/topics/JuneauEcosystemOverview">Juneau Ecosystem Overview</a> for general assertion usage and {@link ZonedDateTimeAssertion} for supported operations on this type. * * @param value * The object being tested. * <br>Can be <jk>null</jk>. * @return * A new assertion object. * <br>Never <jk>null</jk>. */ public static final ZonedDateTimeAssertion assertZonedDateTime(ZonedDateTime value) { return ZonedDateTimeAssertion.create(value); } //----------------------------------------------------------------------------------------------------------------- // Snippet assertions //----------------------------------------------------------------------------------------------------------------- /** * Executes an arbitrary snippet of code and captures anything thrown from it as a Throwable assertion. * * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jk>import static</jk> org.apache.juneau.assertions.Assertions.*; * * <jc>// Asserts that the specified method throws a RuntimeException containing "foobar" in the message. </jc> * <jsm>assertThrown</jsm>(()-&gt;<jv>foo</jv>.getBar()) * .isType(RuntimeException.<jk>class</jk>) * .message().contains(<js>"foobar"</js>); * </p> * * @param snippet The snippet of code to execute. * @return A new assertion object. Never <jk>null</jk>. */ public static final ThrowableAssertion<Throwable> assertThrown(Snippet snippet) { try { snippet.run(); } catch (Throwable e) { return assertThrowable(e); } return assertThrowable(null); } }
apache/kafka
37,054
connect/runtime/src/test/java/org/apache/kafka/connect/util/TopicCreationTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.connect.util; import org.apache.kafka.clients.admin.NewTopic; import org.apache.kafka.connect.data.Schema; import org.apache.kafka.connect.runtime.ConnectMetrics; import org.apache.kafka.connect.runtime.MockConnectMetrics; import org.apache.kafka.connect.runtime.SourceConnectorConfig; import org.apache.kafka.connect.runtime.TransformationStage; import org.apache.kafka.connect.runtime.WorkerConfig; import org.apache.kafka.connect.runtime.distributed.DistributedConfig; import org.apache.kafka.connect.source.SourceRecord; import org.apache.kafka.connect.storage.StringConverter; import org.apache.kafka.connect.transforms.Cast; import org.apache.kafka.connect.transforms.RegexRouter; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; import static org.apache.kafka.common.config.TopicConfig.CLEANUP_POLICY_COMPACT; import static org.apache.kafka.common.config.TopicConfig.CLEANUP_POLICY_CONFIG; import static org.apache.kafka.common.config.TopicConfig.COMPRESSION_TYPE_CONFIG; import static 
org.apache.kafka.common.config.TopicConfig.RETENTION_MS_CONFIG; import static org.apache.kafka.connect.runtime.ConnectorConfig.CONNECTOR_CLASS_CONFIG; import static org.apache.kafka.connect.runtime.ConnectorConfig.NAME_CONFIG; import static org.apache.kafka.connect.runtime.ConnectorConfigTest.MOCK_PLUGINS; import static org.apache.kafka.connect.runtime.SourceConnectorConfig.TOPIC_CREATION_GROUPS_CONFIG; import static org.apache.kafka.connect.runtime.SourceConnectorConfig.TOPIC_CREATION_PREFIX; import static org.apache.kafka.connect.runtime.TopicCreationConfig.DEFAULT_TOPIC_CREATION_GROUP; import static org.apache.kafka.connect.runtime.TopicCreationConfig.DEFAULT_TOPIC_CREATION_PREFIX; import static org.apache.kafka.connect.runtime.TopicCreationConfig.EXCLUDE_REGEX_CONFIG; import static org.apache.kafka.connect.runtime.TopicCreationConfig.INCLUDE_REGEX_CONFIG; import static org.apache.kafka.connect.runtime.TopicCreationConfig.PARTITIONS_CONFIG; import static org.apache.kafka.connect.runtime.TopicCreationConfig.REPLICATION_FACTOR_CONFIG; import static org.apache.kafka.connect.runtime.WorkerConfig.BOOTSTRAP_SERVERS_CONFIG; import static org.apache.kafka.connect.runtime.WorkerConfig.KEY_CONVERTER_CLASS_CONFIG; import static org.apache.kafka.connect.runtime.WorkerConfig.TOPIC_CREATION_ENABLE_CONFIG; import static org.apache.kafka.connect.runtime.WorkerConfig.VALUE_CONVERTER_CLASS_CONFIG; import static org.apache.kafka.connect.runtime.distributed.DistributedConfig.CONFIG_TOPIC_CONFIG; import static org.apache.kafka.connect.runtime.distributed.DistributedConfig.GROUP_ID_CONFIG; import static org.apache.kafka.connect.runtime.distributed.DistributedConfig.OFFSET_STORAGE_TOPIC_CONFIG; import static org.apache.kafka.connect.runtime.distributed.DistributedConfig.STATUS_STORAGE_TOPIC_CONFIG; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNull; import 
static org.junit.jupiter.api.Assertions.assertTrue; public class TopicCreationTest { private static final String FOO_CONNECTOR = "foo-source"; private static final String FOO_GROUP = "foo"; private static final String FOO_TOPIC = "foo-topic"; private static final String FOO_REGEX = ".*foo.*"; private static final String BAR_GROUP = "bar"; private static final String BAR_TOPIC = "bar-topic"; private static final String BAR_REGEX = ".*bar.*"; private static final short DEFAULT_REPLICATION_FACTOR = -1; private static final int DEFAULT_PARTITIONS = -1; private static final ConnectMetrics METRICS = new MockConnectMetrics(); private static final ConnectorTaskId CONNECTOR_TASK_ID = new ConnectorTaskId("test", 0); Map<String, String> workerProps; WorkerConfig workerConfig; Map<String, String> sourceProps; SourceConnectorConfig sourceConfig; @BeforeEach public void setup() { workerProps = defaultWorkerProps(); workerConfig = new DistributedConfig(workerProps); } public Map<String, String> defaultWorkerProps() { Map<String, String> props = new HashMap<>(); props.put(GROUP_ID_CONFIG, "connect-cluster"); props.put(BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); props.put(CONFIG_TOPIC_CONFIG, "connect-configs"); props.put(OFFSET_STORAGE_TOPIC_CONFIG, "connect-offsets"); props.put(STATUS_STORAGE_TOPIC_CONFIG, "connect-status"); props.put(KEY_CONVERTER_CLASS_CONFIG, StringConverter.class.getName()); props.put(VALUE_CONVERTER_CLASS_CONFIG, StringConverter.class.getName()); props.put(TOPIC_CREATION_ENABLE_CONFIG, String.valueOf(true)); return props; } public Map<String, String> defaultConnectorProps() { Map<String, String> props = new HashMap<>(); props.put(NAME_CONFIG, FOO_CONNECTOR); props.put(CONNECTOR_CLASS_CONFIG, "TestConnector"); return props; } public Map<String, String> defaultConnectorPropsWithTopicCreation() { Map<String, String> props = defaultConnectorProps(); props.put(DEFAULT_TOPIC_CREATION_PREFIX + REPLICATION_FACTOR_CONFIG, String.valueOf(DEFAULT_REPLICATION_FACTOR)); 
props.put(DEFAULT_TOPIC_CREATION_PREFIX + PARTITIONS_CONFIG, String.valueOf(DEFAULT_PARTITIONS)); return props; } @Test public void testTopicCreationWhenTopicCreationIsEnabled() { sourceProps = defaultConnectorPropsWithTopicCreation(); sourceProps.put(TOPIC_CREATION_GROUPS_CONFIG, String.join(",", FOO_GROUP, BAR_GROUP)); sourceConfig = new SourceConnectorConfig(MOCK_PLUGINS, sourceProps, true); Map<String, TopicCreationGroup> groups = TopicCreationGroup.configuredGroups(sourceConfig); TopicCreation topicCreation = TopicCreation.newTopicCreation(workerConfig, groups); assertTrue(topicCreation.isTopicCreationEnabled()); assertTrue(topicCreation.isTopicCreationRequired(FOO_TOPIC)); assertEquals(topicCreation.defaultTopicGroup(), groups.get(DEFAULT_TOPIC_CREATION_GROUP)); assertEquals(2, topicCreation.topicGroups().size()); assertEquals(Set.of(FOO_GROUP, BAR_GROUP), topicCreation.topicGroups().keySet()); assertEquals(topicCreation.defaultTopicGroup(), topicCreation.findFirstGroup(FOO_TOPIC)); topicCreation.addTopic(FOO_TOPIC); assertFalse(topicCreation.isTopicCreationRequired(FOO_TOPIC)); } @Test public void testTopicCreationWhenTopicCreationIsDisabled() { workerProps.put(TOPIC_CREATION_ENABLE_CONFIG, String.valueOf(false)); workerConfig = new DistributedConfig(workerProps); sourceProps = defaultConnectorPropsWithTopicCreation(); sourceConfig = new SourceConnectorConfig(MOCK_PLUGINS, sourceProps, true); TopicCreation topicCreation = TopicCreation.newTopicCreation(workerConfig, TopicCreationGroup.configuredGroups(sourceConfig)); assertFalse(topicCreation.isTopicCreationEnabled()); assertFalse(topicCreation.isTopicCreationRequired(FOO_TOPIC)); assertNull(topicCreation.defaultTopicGroup()); assertEquals(Map.of(), topicCreation.topicGroups()); assertNull(topicCreation.findFirstGroup(FOO_TOPIC)); topicCreation.addTopic(FOO_TOPIC); assertFalse(topicCreation.isTopicCreationRequired(FOO_TOPIC)); } @Test public void testEmptyTopicCreation() { TopicCreation topicCreation = 
TopicCreation.newTopicCreation(workerConfig, null); assertEquals(TopicCreation.empty(), topicCreation); assertFalse(topicCreation.isTopicCreationEnabled()); assertFalse(topicCreation.isTopicCreationRequired(FOO_TOPIC)); assertNull(topicCreation.defaultTopicGroup()); assertEquals(0, topicCreation.topicGroups().size()); assertEquals(Map.of(), topicCreation.topicGroups()); assertNull(topicCreation.findFirstGroup(FOO_TOPIC)); topicCreation.addTopic(FOO_TOPIC); assertFalse(topicCreation.isTopicCreationRequired(FOO_TOPIC)); } @Test public void withDefaultTopicCreation() { sourceProps = defaultConnectorPropsWithTopicCreation(); // Setting here but they should be ignored for the default group sourceProps.put(TOPIC_CREATION_PREFIX + DEFAULT_TOPIC_CREATION_GROUP + "." + INCLUDE_REGEX_CONFIG, FOO_REGEX); sourceProps.put(TOPIC_CREATION_PREFIX + DEFAULT_TOPIC_CREATION_GROUP + "." + EXCLUDE_REGEX_CONFIG, BAR_REGEX); // verify config creation sourceConfig = new SourceConnectorConfig(MOCK_PLUGINS, sourceProps, true); assertTrue(sourceConfig.usesTopicCreation()); assertEquals(DEFAULT_REPLICATION_FACTOR, (short) sourceConfig.topicCreationReplicationFactor(DEFAULT_TOPIC_CREATION_GROUP)); assertEquals(DEFAULT_PARTITIONS, (int) sourceConfig.topicCreationPartitions(DEFAULT_TOPIC_CREATION_GROUP)); assertEquals(List.of(".*"), sourceConfig.topicCreationInclude(DEFAULT_TOPIC_CREATION_GROUP)); assertEquals(List.of(), sourceConfig.topicCreationExclude(DEFAULT_TOPIC_CREATION_GROUP)); assertEquals(Map.of(), sourceConfig.topicCreationOtherConfigs(DEFAULT_TOPIC_CREATION_GROUP)); // verify topic creation group is instantiated correctly Map<String, TopicCreationGroup> groups = TopicCreationGroup.configuredGroups(sourceConfig); assertEquals(1, groups.size()); assertEquals(Set.of(DEFAULT_TOPIC_CREATION_GROUP), groups.keySet()); // verify topic creation TopicCreation topicCreation = TopicCreation.newTopicCreation(workerConfig, groups); TopicCreationGroup group = topicCreation.defaultTopicGroup(); // 
Default group will match all topics besides empty string assertTrue(group.matches(" ")); assertTrue(group.matches(FOO_TOPIC)); assertEquals(DEFAULT_TOPIC_CREATION_GROUP, group.name()); assertTrue(topicCreation.isTopicCreationEnabled()); assertTrue(topicCreation.isTopicCreationRequired(FOO_TOPIC)); assertEquals(Map.of(), topicCreation.topicGroups()); assertEquals(topicCreation.defaultTopicGroup(), topicCreation.findFirstGroup(FOO_TOPIC)); topicCreation.addTopic(FOO_TOPIC); assertFalse(topicCreation.isTopicCreationRequired(FOO_TOPIC)); // verify new topic properties NewTopic topicSpec = topicCreation.findFirstGroup(FOO_TOPIC).newTopic(FOO_TOPIC); assertEquals(FOO_TOPIC, topicSpec.name()); assertEquals(DEFAULT_REPLICATION_FACTOR, topicSpec.replicationFactor()); assertEquals(DEFAULT_PARTITIONS, topicSpec.numPartitions()); assertEquals(Map.of(), topicSpec.configs()); } @Test public void topicCreationWithDefaultGroupAndCustomProps() { short replicas = 3; int partitions = 5; long retentionMs = TimeUnit.DAYS.toMillis(30); String compressionType = "lz4"; Map<String, String> topicProps = new HashMap<>(); topicProps.put(COMPRESSION_TYPE_CONFIG, compressionType); topicProps.put(RETENTION_MS_CONFIG, String.valueOf(retentionMs)); sourceProps = defaultConnectorPropsWithTopicCreation(); sourceProps.put(DEFAULT_TOPIC_CREATION_PREFIX + REPLICATION_FACTOR_CONFIG, String.valueOf(replicas)); sourceProps.put(DEFAULT_TOPIC_CREATION_PREFIX + PARTITIONS_CONFIG, String.valueOf(partitions)); topicProps.forEach((k, v) -> sourceProps.put(DEFAULT_TOPIC_CREATION_PREFIX + k, v)); // Setting here but they should be ignored for the default group sourceProps.put(TOPIC_CREATION_PREFIX + DEFAULT_TOPIC_CREATION_GROUP + "." + INCLUDE_REGEX_CONFIG, FOO_REGEX); sourceProps.put(TOPIC_CREATION_PREFIX + DEFAULT_TOPIC_CREATION_GROUP + "." 
+ EXCLUDE_REGEX_CONFIG, BAR_REGEX); // verify config creation sourceConfig = new SourceConnectorConfig(MOCK_PLUGINS, sourceProps, true); assertTrue(sourceConfig.usesTopicCreation()); assertEquals(replicas, (short) sourceConfig.topicCreationReplicationFactor(DEFAULT_TOPIC_CREATION_GROUP)); assertEquals(partitions, (int) sourceConfig.topicCreationPartitions(DEFAULT_TOPIC_CREATION_GROUP)); assertEquals(List.of(".*"), sourceConfig.topicCreationInclude(DEFAULT_TOPIC_CREATION_GROUP)); assertEquals(List.of(), sourceConfig.topicCreationExclude(DEFAULT_TOPIC_CREATION_GROUP)); assertEquals(topicProps, sourceConfig.topicCreationOtherConfigs(DEFAULT_TOPIC_CREATION_GROUP)); // verify topic creation group is instantiated correctly Map<String, TopicCreationGroup> groups = TopicCreationGroup.configuredGroups(sourceConfig); assertEquals(1, groups.size()); assertEquals(Set.of(DEFAULT_TOPIC_CREATION_GROUP), groups.keySet()); // verify topic creation TopicCreation topicCreation = TopicCreation.newTopicCreation(workerConfig, groups); TopicCreationGroup group = topicCreation.defaultTopicGroup(); // Default group will match all topics besides empty string assertTrue(group.matches(" ")); assertTrue(group.matches(FOO_TOPIC)); assertEquals(DEFAULT_TOPIC_CREATION_GROUP, group.name()); assertTrue(topicCreation.isTopicCreationEnabled()); assertTrue(topicCreation.isTopicCreationRequired(FOO_TOPIC)); assertEquals(Map.of(), topicCreation.topicGroups()); assertEquals(topicCreation.defaultTopicGroup(), topicCreation.findFirstGroup(FOO_TOPIC)); topicCreation.addTopic(FOO_TOPIC); assertFalse(topicCreation.isTopicCreationRequired(FOO_TOPIC)); // verify new topic properties NewTopic topicSpec = topicCreation.findFirstGroup(FOO_TOPIC).newTopic(FOO_TOPIC); assertEquals(FOO_TOPIC, topicSpec.name()); assertEquals(replicas, topicSpec.replicationFactor()); assertEquals(partitions, topicSpec.numPartitions()); assertEquals(topicProps, topicSpec.configs()); } @Test public void topicCreationWithOneGroup() { 
short fooReplicas = 3; int partitions = 5; sourceProps = defaultConnectorPropsWithTopicCreation(); sourceProps.put(TOPIC_CREATION_GROUPS_CONFIG, String.join(",", FOO_GROUP)); sourceProps.put(DEFAULT_TOPIC_CREATION_PREFIX + PARTITIONS_CONFIG, String.valueOf(partitions)); sourceProps.put(TOPIC_CREATION_PREFIX + FOO_GROUP + "." + INCLUDE_REGEX_CONFIG, FOO_REGEX); sourceProps.put(TOPIC_CREATION_PREFIX + FOO_GROUP + "." + EXCLUDE_REGEX_CONFIG, BAR_REGEX); sourceProps.put(TOPIC_CREATION_PREFIX + FOO_GROUP + "." + REPLICATION_FACTOR_CONFIG, String.valueOf(fooReplicas)); Map<String, String> topicProps = new HashMap<>(); topicProps.put(CLEANUP_POLICY_CONFIG, CLEANUP_POLICY_COMPACT); topicProps.forEach((k, v) -> sourceProps.put(TOPIC_CREATION_PREFIX + FOO_GROUP + "." + k, v)); // verify config creation sourceConfig = new SourceConnectorConfig(MOCK_PLUGINS, sourceProps, true); assertTrue(sourceConfig.usesTopicCreation()); assertEquals(DEFAULT_REPLICATION_FACTOR, (short) sourceConfig.topicCreationReplicationFactor(DEFAULT_TOPIC_CREATION_GROUP)); assertEquals(partitions, (int) sourceConfig.topicCreationPartitions(DEFAULT_TOPIC_CREATION_GROUP)); assertEquals(List.of(".*"), sourceConfig.topicCreationInclude(DEFAULT_TOPIC_CREATION_GROUP)); assertEquals(List.of(), sourceConfig.topicCreationExclude(DEFAULT_TOPIC_CREATION_GROUP)); assertEquals(Map.of(), sourceConfig.topicCreationOtherConfigs(DEFAULT_TOPIC_CREATION_GROUP)); // verify topic creation group is instantiated correctly Map<String, TopicCreationGroup> groups = TopicCreationGroup.configuredGroups(sourceConfig); assertEquals(2, groups.size()); assertEquals(Set.of(DEFAULT_TOPIC_CREATION_GROUP, FOO_GROUP), groups.keySet()); // verify topic creation TopicCreation topicCreation = TopicCreation.newTopicCreation(workerConfig, groups); TopicCreationGroup defaultGroup = topicCreation.defaultTopicGroup(); // Default group will match all topics besides empty string assertTrue(defaultGroup.matches(" ")); 
assertTrue(defaultGroup.matches(FOO_TOPIC)); assertTrue(defaultGroup.matches(BAR_TOPIC)); assertEquals(DEFAULT_TOPIC_CREATION_GROUP, defaultGroup.name()); TopicCreationGroup fooGroup = groups.get(FOO_GROUP); assertFalse(fooGroup.matches(" ")); assertTrue(fooGroup.matches(FOO_TOPIC)); assertFalse(fooGroup.matches(BAR_TOPIC)); assertEquals(FOO_GROUP, fooGroup.name()); assertTrue(topicCreation.isTopicCreationEnabled()); assertTrue(topicCreation.isTopicCreationRequired(FOO_TOPIC)); assertEquals(1, topicCreation.topicGroups().size()); assertEquals(Set.of(FOO_GROUP), topicCreation.topicGroups().keySet()); assertEquals(fooGroup, topicCreation.findFirstGroup(FOO_TOPIC)); topicCreation.addTopic(FOO_TOPIC); assertFalse(topicCreation.isTopicCreationRequired(FOO_TOPIC)); // verify new topic properties NewTopic defaultTopicSpec = topicCreation.findFirstGroup(BAR_TOPIC).newTopic(BAR_TOPIC); assertEquals(BAR_TOPIC, defaultTopicSpec.name()); assertEquals(DEFAULT_REPLICATION_FACTOR, defaultTopicSpec.replicationFactor()); assertEquals(partitions, defaultTopicSpec.numPartitions()); assertEquals(Map.of(), defaultTopicSpec.configs()); NewTopic fooTopicSpec = topicCreation.findFirstGroup(FOO_TOPIC).newTopic(FOO_TOPIC); assertEquals(FOO_TOPIC, fooTopicSpec.name()); assertEquals(fooReplicas, fooTopicSpec.replicationFactor()); assertEquals(partitions, fooTopicSpec.numPartitions()); assertEquals(topicProps, fooTopicSpec.configs()); } @Test public void topicCreationWithOneGroupAndCombinedRegex() { short fooReplicas = 3; int partitions = 5; sourceProps = defaultConnectorPropsWithTopicCreation(); sourceProps.put(TOPIC_CREATION_GROUPS_CONFIG, String.join(",", FOO_GROUP)); sourceProps.put(DEFAULT_TOPIC_CREATION_PREFIX + PARTITIONS_CONFIG, String.valueOf(partitions)); // Setting here but they should be ignored for the default group sourceProps.put(TOPIC_CREATION_PREFIX + FOO_GROUP + "." 
+ INCLUDE_REGEX_CONFIG, String.join("|", FOO_REGEX, BAR_REGEX)); sourceProps.put(TOPIC_CREATION_PREFIX + FOO_GROUP + "." + REPLICATION_FACTOR_CONFIG, String.valueOf(fooReplicas)); Map<String, String> topicProps = new HashMap<>(); topicProps.put(CLEANUP_POLICY_CONFIG, CLEANUP_POLICY_COMPACT); topicProps.forEach((k, v) -> sourceProps.put(TOPIC_CREATION_PREFIX + FOO_GROUP + "." + k, v)); // verify config creation sourceConfig = new SourceConnectorConfig(MOCK_PLUGINS, sourceProps, true); assertTrue(sourceConfig.usesTopicCreation()); assertEquals(DEFAULT_REPLICATION_FACTOR, (short) sourceConfig.topicCreationReplicationFactor(DEFAULT_TOPIC_CREATION_GROUP)); assertEquals(partitions, (int) sourceConfig.topicCreationPartitions(DEFAULT_TOPIC_CREATION_GROUP)); assertEquals(List.of(".*"), sourceConfig.topicCreationInclude(DEFAULT_TOPIC_CREATION_GROUP)); assertEquals(List.of(), sourceConfig.topicCreationExclude(DEFAULT_TOPIC_CREATION_GROUP)); assertEquals(Map.of(), sourceConfig.topicCreationOtherConfigs(DEFAULT_TOPIC_CREATION_GROUP)); // verify topic creation group is instantiated correctly Map<String, TopicCreationGroup> groups = TopicCreationGroup.configuredGroups(sourceConfig); assertEquals(2, groups.size()); assertEquals(Set.of(DEFAULT_TOPIC_CREATION_GROUP, FOO_GROUP), groups.keySet()); // verify topic creation TopicCreation topicCreation = TopicCreation.newTopicCreation(workerConfig, groups); TopicCreationGroup defaultGroup = topicCreation.defaultTopicGroup(); // Default group will match all topics besides empty string assertTrue(defaultGroup.matches(" ")); assertTrue(defaultGroup.matches(FOO_TOPIC)); assertTrue(defaultGroup.matches(BAR_TOPIC)); assertEquals(DEFAULT_TOPIC_CREATION_GROUP, defaultGroup.name()); TopicCreationGroup fooGroup = groups.get(FOO_GROUP); assertFalse(fooGroup.matches(" ")); assertTrue(fooGroup.matches(FOO_TOPIC)); assertTrue(fooGroup.matches(BAR_TOPIC)); assertEquals(FOO_GROUP, fooGroup.name()); assertTrue(topicCreation.isTopicCreationEnabled()); 
assertTrue(topicCreation.isTopicCreationRequired(FOO_TOPIC)); assertTrue(topicCreation.isTopicCreationRequired(BAR_TOPIC)); assertEquals(1, topicCreation.topicGroups().size()); assertEquals(Set.of(FOO_GROUP), topicCreation.topicGroups().keySet()); assertEquals(fooGroup, topicCreation.findFirstGroup(FOO_TOPIC)); assertEquals(fooGroup, topicCreation.findFirstGroup(BAR_TOPIC)); topicCreation.addTopic(FOO_TOPIC); topicCreation.addTopic(BAR_TOPIC); assertFalse(topicCreation.isTopicCreationRequired(FOO_TOPIC)); assertFalse(topicCreation.isTopicCreationRequired(BAR_TOPIC)); // verify new topic properties NewTopic fooTopicSpec = topicCreation.findFirstGroup(FOO_TOPIC).newTopic(FOO_TOPIC); assertEquals(FOO_TOPIC, fooTopicSpec.name()); assertEquals(fooReplicas, fooTopicSpec.replicationFactor()); assertEquals(partitions, fooTopicSpec.numPartitions()); assertEquals(topicProps, fooTopicSpec.configs()); NewTopic barTopicSpec = topicCreation.findFirstGroup(BAR_TOPIC).newTopic(BAR_TOPIC); assertEquals(BAR_TOPIC, barTopicSpec.name()); assertEquals(fooReplicas, barTopicSpec.replicationFactor()); assertEquals(partitions, barTopicSpec.numPartitions()); assertEquals(topicProps, barTopicSpec.configs()); } @Test public void topicCreationWithTwoGroups() { short fooReplicas = 3; int partitions = 5; int barPartitions = 1; sourceProps = defaultConnectorPropsWithTopicCreation(); sourceProps.put(TOPIC_CREATION_GROUPS_CONFIG, String.join(",", FOO_GROUP, BAR_GROUP)); sourceProps.put(DEFAULT_TOPIC_CREATION_PREFIX + PARTITIONS_CONFIG, String.valueOf(partitions)); // Setting here but they should be ignored for the default group sourceProps.put(TOPIC_CREATION_PREFIX + FOO_GROUP + "." + INCLUDE_REGEX_CONFIG, FOO_TOPIC); sourceProps.put(TOPIC_CREATION_PREFIX + FOO_GROUP + "." + REPLICATION_FACTOR_CONFIG, String.valueOf(fooReplicas)); sourceProps.put(TOPIC_CREATION_PREFIX + BAR_GROUP + "." + INCLUDE_REGEX_CONFIG, BAR_REGEX); sourceProps.put(TOPIC_CREATION_PREFIX + BAR_GROUP + "." 
+ PARTITIONS_CONFIG, String.valueOf(barPartitions)); Map<String, String> fooTopicProps = new HashMap<>(); fooTopicProps.put(RETENTION_MS_CONFIG, String.valueOf(TimeUnit.DAYS.toMillis(30))); fooTopicProps.forEach((k, v) -> sourceProps.put(TOPIC_CREATION_PREFIX + FOO_GROUP + "." + k, v)); Map<String, String> barTopicProps = new HashMap<>(); barTopicProps.put(CLEANUP_POLICY_CONFIG, CLEANUP_POLICY_COMPACT); barTopicProps.forEach((k, v) -> sourceProps.put(TOPIC_CREATION_PREFIX + BAR_GROUP + "." + k, v)); // verify config creation sourceConfig = new SourceConnectorConfig(MOCK_PLUGINS, sourceProps, true); assertTrue(sourceConfig.usesTopicCreation()); assertEquals(DEFAULT_REPLICATION_FACTOR, (short) sourceConfig.topicCreationReplicationFactor(DEFAULT_TOPIC_CREATION_GROUP)); assertEquals(partitions, (int) sourceConfig.topicCreationPartitions(DEFAULT_TOPIC_CREATION_GROUP)); assertEquals(List.of(".*"), sourceConfig.topicCreationInclude(DEFAULT_TOPIC_CREATION_GROUP)); assertEquals(List.of(), sourceConfig.topicCreationExclude(DEFAULT_TOPIC_CREATION_GROUP)); assertEquals(Map.of(), sourceConfig.topicCreationOtherConfigs(DEFAULT_TOPIC_CREATION_GROUP)); // verify topic creation group is instantiated correctly Map<String, TopicCreationGroup> groups = TopicCreationGroup.configuredGroups(sourceConfig); assertEquals(3, groups.size()); assertEquals(Set.of(DEFAULT_TOPIC_CREATION_GROUP, FOO_GROUP, BAR_GROUP), groups.keySet()); // verify topic creation TopicCreation topicCreation = TopicCreation.newTopicCreation(workerConfig, groups); TopicCreationGroup defaultGroup = topicCreation.defaultTopicGroup(); // Default group will match all topics besides empty string assertTrue(defaultGroup.matches(" ")); assertTrue(defaultGroup.matches(FOO_TOPIC)); assertTrue(defaultGroup.matches(BAR_TOPIC)); assertEquals(DEFAULT_TOPIC_CREATION_GROUP, defaultGroup.name()); TopicCreationGroup fooGroup = groups.get(FOO_GROUP); assertFalse(fooGroup.matches(" ")); assertTrue(fooGroup.matches(FOO_TOPIC)); 
assertFalse(fooGroup.matches(BAR_TOPIC)); assertEquals(FOO_GROUP, fooGroup.name()); TopicCreationGroup barGroup = groups.get(BAR_GROUP); assertTrue(barGroup.matches(BAR_TOPIC)); assertFalse(barGroup.matches(FOO_TOPIC)); assertEquals(BAR_GROUP, barGroup.name()); assertTrue(topicCreation.isTopicCreationEnabled()); assertTrue(topicCreation.isTopicCreationRequired(FOO_TOPIC)); assertTrue(topicCreation.isTopicCreationRequired(BAR_TOPIC)); assertEquals(2, topicCreation.topicGroups().size()); assertEquals(Set.of(FOO_GROUP, BAR_GROUP), topicCreation.topicGroups().keySet()); assertEquals(fooGroup, topicCreation.findFirstGroup(FOO_TOPIC)); assertEquals(barGroup, topicCreation.findFirstGroup(BAR_TOPIC)); topicCreation.addTopic(FOO_TOPIC); topicCreation.addTopic(BAR_TOPIC); assertFalse(topicCreation.isTopicCreationRequired(FOO_TOPIC)); assertFalse(topicCreation.isTopicCreationRequired(BAR_TOPIC)); // verify new topic properties String otherTopic = "any-other-topic"; NewTopic defaultTopicSpec = topicCreation.findFirstGroup(otherTopic).newTopic(otherTopic); assertEquals(otherTopic, defaultTopicSpec.name()); assertEquals(DEFAULT_REPLICATION_FACTOR, defaultTopicSpec.replicationFactor()); assertEquals(partitions, defaultTopicSpec.numPartitions()); assertEquals(Map.of(), defaultTopicSpec.configs()); NewTopic fooTopicSpec = topicCreation.findFirstGroup(FOO_TOPIC).newTopic(FOO_TOPIC); assertEquals(FOO_TOPIC, fooTopicSpec.name()); assertEquals(fooReplicas, fooTopicSpec.replicationFactor()); assertEquals(partitions, fooTopicSpec.numPartitions()); assertEquals(fooTopicProps, fooTopicSpec.configs()); NewTopic barTopicSpec = topicCreation.findFirstGroup(BAR_TOPIC).newTopic(BAR_TOPIC); assertEquals(BAR_TOPIC, barTopicSpec.name()); assertEquals(DEFAULT_REPLICATION_FACTOR, barTopicSpec.replicationFactor()); assertEquals(barPartitions, barTopicSpec.numPartitions()); assertEquals(barTopicProps, barTopicSpec.configs()); } @Test public void testTopicCreationWithSingleTransformation() { 
sourceProps = defaultConnectorPropsWithTopicCreation(); sourceProps.put(TOPIC_CREATION_GROUPS_CONFIG, String.join(",", FOO_GROUP, BAR_GROUP)); String xformName = "example"; String castType = "int8"; sourceProps.put("transforms", xformName); sourceProps.put("transforms." + xformName + ".type", Cast.Value.class.getName()); sourceProps.put("transforms." + xformName + ".spec", castType); sourceConfig = new SourceConnectorConfig(MOCK_PLUGINS, sourceProps, true); Map<String, TopicCreationGroup> groups = TopicCreationGroup.configuredGroups(sourceConfig); TopicCreation topicCreation = TopicCreation.newTopicCreation(workerConfig, groups); assertTrue(topicCreation.isTopicCreationEnabled()); assertTrue(topicCreation.isTopicCreationRequired(FOO_TOPIC)); assertEquals(groups.get(DEFAULT_TOPIC_CREATION_GROUP), topicCreation.defaultTopicGroup()); assertEquals(2, topicCreation.topicGroups().size()); assertEquals(Set.of(FOO_GROUP, BAR_GROUP), topicCreation.topicGroups().keySet()); assertEquals(topicCreation.defaultTopicGroup(), topicCreation.findFirstGroup(FOO_TOPIC)); topicCreation.addTopic(FOO_TOPIC); assertFalse(topicCreation.isTopicCreationRequired(FOO_TOPIC)); List<TransformationStage<SourceRecord>> transformationStages = sourceConfig.transformationStages(MOCK_PLUGINS, CONNECTOR_TASK_ID, METRICS); assertEquals(1, transformationStages.size()); TransformationStage<SourceRecord> xform = transformationStages.get(0); SourceRecord transformed = xform.apply(new SourceRecord(null, null, "topic", 0, null, null, Schema.INT8_SCHEMA, 42)); assertEquals(Schema.Type.INT8, transformed.valueSchema().type()); assertEquals((byte) 42, transformed.value()); } @Test public void topicCreationWithTwoGroupsAndTwoTransformations() { short fooReplicas = 3; int partitions = 5; int barPartitions = 1; sourceProps = defaultConnectorPropsWithTopicCreation(); sourceProps.put(TOPIC_CREATION_GROUPS_CONFIG, String.join(",", FOO_GROUP, BAR_GROUP)); sourceProps.put(DEFAULT_TOPIC_CREATION_PREFIX + 
PARTITIONS_CONFIG, String.valueOf(partitions)); // Setting here but they should be ignored for the default group sourceProps.put(TOPIC_CREATION_PREFIX + FOO_GROUP + "." + INCLUDE_REGEX_CONFIG, FOO_TOPIC); sourceProps.put(TOPIC_CREATION_PREFIX + FOO_GROUP + "." + REPLICATION_FACTOR_CONFIG, String.valueOf(fooReplicas)); sourceProps.put(TOPIC_CREATION_PREFIX + BAR_GROUP + "." + INCLUDE_REGEX_CONFIG, BAR_REGEX); sourceProps.put(TOPIC_CREATION_PREFIX + BAR_GROUP + "." + PARTITIONS_CONFIG, String.valueOf(barPartitions)); String castName = "cast"; String castType = "int8"; sourceProps.put("transforms." + castName + ".type", Cast.Value.class.getName()); sourceProps.put("transforms." + castName + ".spec", castType); String regexRouterName = "regex"; sourceProps.put("transforms." + regexRouterName + ".type", RegexRouter.class.getName()); sourceProps.put("transforms." + regexRouterName + ".regex", "(.*)"); sourceProps.put("transforms." + regexRouterName + ".replacement", "prefix-$1"); sourceProps.put("transforms", String.join(",", castName, regexRouterName)); Map<String, String> fooTopicProps = new HashMap<>(); fooTopicProps.put(RETENTION_MS_CONFIG, String.valueOf(TimeUnit.DAYS.toMillis(30))); fooTopicProps.forEach((k, v) -> sourceProps.put(TOPIC_CREATION_PREFIX + FOO_GROUP + "." + k, v)); Map<String, String> barTopicProps = new HashMap<>(); barTopicProps.put(CLEANUP_POLICY_CONFIG, CLEANUP_POLICY_COMPACT); barTopicProps.forEach((k, v) -> sourceProps.put(TOPIC_CREATION_PREFIX + BAR_GROUP + "." 
+ k, v)); // verify config creation sourceConfig = new SourceConnectorConfig(MOCK_PLUGINS, sourceProps, true); assertTrue(sourceConfig.usesTopicCreation()); assertEquals(DEFAULT_REPLICATION_FACTOR, (short) sourceConfig.topicCreationReplicationFactor(DEFAULT_TOPIC_CREATION_GROUP)); assertEquals(partitions, (int) sourceConfig.topicCreationPartitions(DEFAULT_TOPIC_CREATION_GROUP)); assertEquals(List.of(".*"), sourceConfig.topicCreationInclude(DEFAULT_TOPIC_CREATION_GROUP)); assertEquals(List.of(), sourceConfig.topicCreationExclude(DEFAULT_TOPIC_CREATION_GROUP)); assertEquals(Map.of(), sourceConfig.topicCreationOtherConfigs(DEFAULT_TOPIC_CREATION_GROUP)); // verify topic creation group is instantiated correctly Map<String, TopicCreationGroup> groups = TopicCreationGroup.configuredGroups(sourceConfig); assertEquals(3, groups.size()); assertEquals(Set.of(DEFAULT_TOPIC_CREATION_GROUP, FOO_GROUP, BAR_GROUP), groups.keySet()); // verify topic creation TopicCreation topicCreation = TopicCreation.newTopicCreation(workerConfig, groups); TopicCreationGroup defaultGroup = topicCreation.defaultTopicGroup(); // Default group will match all topics besides empty string assertTrue(defaultGroup.matches(" ")); assertTrue(defaultGroup.matches(FOO_TOPIC)); assertTrue(defaultGroup.matches(BAR_TOPIC)); assertEquals(DEFAULT_TOPIC_CREATION_GROUP, defaultGroup.name()); TopicCreationGroup fooGroup = groups.get(FOO_GROUP); assertFalse(fooGroup.matches(" ")); assertTrue(fooGroup.matches(FOO_TOPIC)); assertFalse(fooGroup.matches(BAR_TOPIC)); assertEquals(FOO_GROUP, fooGroup.name()); TopicCreationGroup barGroup = groups.get(BAR_GROUP); assertTrue(barGroup.matches(BAR_TOPIC)); assertFalse(barGroup.matches(FOO_TOPIC)); assertEquals(BAR_GROUP, barGroup.name()); assertTrue(topicCreation.isTopicCreationEnabled()); assertTrue(topicCreation.isTopicCreationRequired(FOO_TOPIC)); assertTrue(topicCreation.isTopicCreationRequired(BAR_TOPIC)); assertEquals(2, topicCreation.topicGroups().size()); 
assertEquals(Set.of(FOO_GROUP, BAR_GROUP), topicCreation.topicGroups().keySet()); assertEquals(fooGroup, topicCreation.findFirstGroup(FOO_TOPIC)); assertEquals(barGroup, topicCreation.findFirstGroup(BAR_TOPIC)); topicCreation.addTopic(FOO_TOPIC); topicCreation.addTopic(BAR_TOPIC); assertFalse(topicCreation.isTopicCreationRequired(FOO_TOPIC)); assertFalse(topicCreation.isTopicCreationRequired(BAR_TOPIC)); // verify new topic properties String otherTopic = "any-other-topic"; NewTopic defaultTopicSpec = topicCreation.findFirstGroup(otherTopic).newTopic(otherTopic); assertEquals(otherTopic, defaultTopicSpec.name()); assertEquals(DEFAULT_REPLICATION_FACTOR, defaultTopicSpec.replicationFactor()); assertEquals(partitions, defaultTopicSpec.numPartitions()); assertEquals(Map.of(), defaultTopicSpec.configs()); NewTopic fooTopicSpec = topicCreation.findFirstGroup(FOO_TOPIC).newTopic(FOO_TOPIC); assertEquals(FOO_TOPIC, fooTopicSpec.name()); assertEquals(fooReplicas, fooTopicSpec.replicationFactor()); assertEquals(partitions, fooTopicSpec.numPartitions()); assertEquals(fooTopicProps, fooTopicSpec.configs()); NewTopic barTopicSpec = topicCreation.findFirstGroup(BAR_TOPIC).newTopic(BAR_TOPIC); assertEquals(BAR_TOPIC, barTopicSpec.name()); assertEquals(DEFAULT_REPLICATION_FACTOR, barTopicSpec.replicationFactor()); assertEquals(barPartitions, barTopicSpec.numPartitions()); assertEquals(barTopicProps, barTopicSpec.configs()); List<TransformationStage<SourceRecord>> transformationStages = sourceConfig.transformationStages(MOCK_PLUGINS, CONNECTOR_TASK_ID, METRICS); assertEquals(2, transformationStages.size()); TransformationStage<SourceRecord> castXForm = transformationStages.get(0); SourceRecord transformed = castXForm.apply(new SourceRecord(null, null, "topic", 0, null, null, Schema.INT8_SCHEMA, 42)); assertEquals(Schema.Type.INT8, transformed.valueSchema().type()); assertEquals((byte) 42, transformed.value()); TransformationStage<SourceRecord> regexRouterXForm = 
transformationStages.get(1); transformed = regexRouterXForm.apply(new SourceRecord(null, null, "topic", 0, null, null, Schema.INT8_SCHEMA, 42)); assertEquals("prefix-topic", transformed.topic()); } }
googleapis/google-cloud-java
36,581
java-shopping-merchant-accounts/proto-google-shopping-merchant-accounts-v1beta/src/main/java/com/google/shopping/merchant/accounts/v1beta/CreateRegionRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/shopping/merchant/accounts/v1beta/regions.proto // Protobuf Java Version: 3.25.8 package com.google.shopping.merchant.accounts.v1beta; /** * * * <pre> * Request message for the `CreateRegion` method. * </pre> * * Protobuf type {@code google.shopping.merchant.accounts.v1beta.CreateRegionRequest} */ public final class CreateRegionRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.shopping.merchant.accounts.v1beta.CreateRegionRequest) CreateRegionRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateRegionRequest.newBuilder() to construct. 
private CreateRegionRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateRegionRequest() { parent_ = ""; regionId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CreateRegionRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.shopping.merchant.accounts.v1beta.RegionsProto .internal_static_google_shopping_merchant_accounts_v1beta_CreateRegionRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.shopping.merchant.accounts.v1beta.RegionsProto .internal_static_google_shopping_merchant_accounts_v1beta_CreateRegionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest.class, com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest.Builder.class); } private int bitField0_; public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The account to create a region for. * Format: `accounts/{account}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The account to create a region for. * Format: `accounts/{account}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... 
} * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int REGION_ID_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object regionId_ = ""; /** * * * <pre> * Required. The identifier for the region, unique over all regions of the * same account. * </pre> * * <code>string region_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The regionId. */ @java.lang.Override public java.lang.String getRegionId() { java.lang.Object ref = regionId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); regionId_ = s; return s; } } /** * * * <pre> * Required. The identifier for the region, unique over all regions of the * same account. * </pre> * * <code>string region_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for regionId. */ @java.lang.Override public com.google.protobuf.ByteString getRegionIdBytes() { java.lang.Object ref = regionId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); regionId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int REGION_FIELD_NUMBER = 3; private com.google.shopping.merchant.accounts.v1beta.Region region_; /** * * * <pre> * Required. The region to create. * </pre> * * <code> * .google.shopping.merchant.accounts.v1beta.Region region = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the region field is set. 
*/ @java.lang.Override public boolean hasRegion() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The region to create. * </pre> * * <code> * .google.shopping.merchant.accounts.v1beta.Region region = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The region. */ @java.lang.Override public com.google.shopping.merchant.accounts.v1beta.Region getRegion() { return region_ == null ? com.google.shopping.merchant.accounts.v1beta.Region.getDefaultInstance() : region_; } /** * * * <pre> * Required. The region to create. * </pre> * * <code> * .google.shopping.merchant.accounts.v1beta.Region region = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.shopping.merchant.accounts.v1beta.RegionOrBuilder getRegionOrBuilder() { return region_ == null ? com.google.shopping.merchant.accounts.v1beta.Region.getDefaultInstance() : region_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(regionId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, regionId_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(3, getRegion()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if 
(!com.google.protobuf.GeneratedMessageV3.isStringEmpty(regionId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, regionId_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getRegion()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest)) { return super.equals(obj); } com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest other = (com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest) obj; if (!getParent().equals(other.getParent())) return false; if (!getRegionId().equals(other.getRegionId())) return false; if (hasRegion() != other.hasRegion()) return false; if (hasRegion()) { if (!getRegion().equals(other.getRegion())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + REGION_ID_FIELD_NUMBER; hash = (53 * hash) + getRegionId().hashCode(); if (hasRegion()) { hash = (37 * hash) + REGION_FIELD_NUMBER; hash = (53 * hash) + getRegion().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 
throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest parseDelimitedFrom( 
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for the `CreateRegion` method. 
* </pre> * * Protobuf type {@code google.shopping.merchant.accounts.v1beta.CreateRegionRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.shopping.merchant.accounts.v1beta.CreateRegionRequest) com.google.shopping.merchant.accounts.v1beta.CreateRegionRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.shopping.merchant.accounts.v1beta.RegionsProto .internal_static_google_shopping_merchant_accounts_v1beta_CreateRegionRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.shopping.merchant.accounts.v1beta.RegionsProto .internal_static_google_shopping_merchant_accounts_v1beta_CreateRegionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest.class, com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest.Builder.class); } // Construct using com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getRegionFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; regionId_ = ""; region_ = null; if (regionBuilder_ != null) { regionBuilder_.dispose(); regionBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.shopping.merchant.accounts.v1beta.RegionsProto 
.internal_static_google_shopping_merchant_accounts_v1beta_CreateRegionRequest_descriptor; } @java.lang.Override public com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest getDefaultInstanceForType() { return com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest.getDefaultInstance(); } @java.lang.Override public com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest build() { com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest buildPartial() { com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest result = new com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.regionId_ = regionId_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000004) != 0)) { result.region_ = regionBuilder_ == null ? 
region_ : regionBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest) { return mergeFrom((com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest other) { if (other == com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getRegionId().isEmpty()) { regionId_ = other.regionId_; bitField0_ |= 0x00000002; onChanged(); } if (other.hasRegion()) { mergeRegion(other.getRegion()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { regionId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { input.readMessage(getRegionFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The account to create a region for. * Format: `accounts/{account}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The account to create a region for. * Format: `accounts/{account}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. 
*/ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The account to create a region for. * Format: `accounts/{account}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The account to create a region for. * Format: `accounts/{account}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The account to create a region for. * Format: `accounts/{account}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object regionId_ = ""; /** * * * <pre> * Required. The identifier for the region, unique over all regions of the * same account. 
* </pre> * * <code>string region_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The regionId. */ public java.lang.String getRegionId() { java.lang.Object ref = regionId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); regionId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The identifier for the region, unique over all regions of the * same account. * </pre> * * <code>string region_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for regionId. */ public com.google.protobuf.ByteString getRegionIdBytes() { java.lang.Object ref = regionId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); regionId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The identifier for the region, unique over all regions of the * same account. * </pre> * * <code>string region_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The regionId to set. * @return This builder for chaining. */ public Builder setRegionId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } regionId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The identifier for the region, unique over all regions of the * same account. * </pre> * * <code>string region_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearRegionId() { regionId_ = getDefaultInstance().getRegionId(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Required. The identifier for the region, unique over all regions of the * same account. 
* </pre> * * <code>string region_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for regionId to set. * @return This builder for chaining. */ public Builder setRegionIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); regionId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private com.google.shopping.merchant.accounts.v1beta.Region region_; private com.google.protobuf.SingleFieldBuilderV3< com.google.shopping.merchant.accounts.v1beta.Region, com.google.shopping.merchant.accounts.v1beta.Region.Builder, com.google.shopping.merchant.accounts.v1beta.RegionOrBuilder> regionBuilder_; /** * * * <pre> * Required. The region to create. * </pre> * * <code> * .google.shopping.merchant.accounts.v1beta.Region region = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the region field is set. */ public boolean hasRegion() { return ((bitField0_ & 0x00000004) != 0); } /** * * * <pre> * Required. The region to create. * </pre> * * <code> * .google.shopping.merchant.accounts.v1beta.Region region = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The region. */ public com.google.shopping.merchant.accounts.v1beta.Region getRegion() { if (regionBuilder_ == null) { return region_ == null ? com.google.shopping.merchant.accounts.v1beta.Region.getDefaultInstance() : region_; } else { return regionBuilder_.getMessage(); } } /** * * * <pre> * Required. The region to create. 
* </pre> * * <code> * .google.shopping.merchant.accounts.v1beta.Region region = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setRegion(com.google.shopping.merchant.accounts.v1beta.Region value) { if (regionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } region_ = value; } else { regionBuilder_.setMessage(value); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. The region to create. * </pre> * * <code> * .google.shopping.merchant.accounts.v1beta.Region region = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setRegion( com.google.shopping.merchant.accounts.v1beta.Region.Builder builderForValue) { if (regionBuilder_ == null) { region_ = builderForValue.build(); } else { regionBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. The region to create. * </pre> * * <code> * .google.shopping.merchant.accounts.v1beta.Region region = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeRegion(com.google.shopping.merchant.accounts.v1beta.Region value) { if (regionBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0) && region_ != null && region_ != com.google.shopping.merchant.accounts.v1beta.Region.getDefaultInstance()) { getRegionBuilder().mergeFrom(value); } else { region_ = value; } } else { regionBuilder_.mergeFrom(value); } if (region_ != null) { bitField0_ |= 0x00000004; onChanged(); } return this; } /** * * * <pre> * Required. The region to create. * </pre> * * <code> * .google.shopping.merchant.accounts.v1beta.Region region = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearRegion() { bitField0_ = (bitField0_ & ~0x00000004); region_ = null; if (regionBuilder_ != null) { regionBuilder_.dispose(); regionBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The region to create. 
* </pre> * * <code> * .google.shopping.merchant.accounts.v1beta.Region region = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.shopping.merchant.accounts.v1beta.Region.Builder getRegionBuilder() { bitField0_ |= 0x00000004; onChanged(); return getRegionFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The region to create. * </pre> * * <code> * .google.shopping.merchant.accounts.v1beta.Region region = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.shopping.merchant.accounts.v1beta.RegionOrBuilder getRegionOrBuilder() { if (regionBuilder_ != null) { return regionBuilder_.getMessageOrBuilder(); } else { return region_ == null ? com.google.shopping.merchant.accounts.v1beta.Region.getDefaultInstance() : region_; } } /** * * * <pre> * Required. The region to create. * </pre> * * <code> * .google.shopping.merchant.accounts.v1beta.Region region = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.shopping.merchant.accounts.v1beta.Region, com.google.shopping.merchant.accounts.v1beta.Region.Builder, com.google.shopping.merchant.accounts.v1beta.RegionOrBuilder> getRegionFieldBuilder() { if (regionBuilder_ == null) { regionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.shopping.merchant.accounts.v1beta.Region, com.google.shopping.merchant.accounts.v1beta.Region.Builder, com.google.shopping.merchant.accounts.v1beta.RegionOrBuilder>( getRegion(), getParentForChildren(), isClean()); region_ = null; } return regionBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // 
@@protoc_insertion_point(builder_scope:google.shopping.merchant.accounts.v1beta.CreateRegionRequest) } // @@protoc_insertion_point(class_scope:google.shopping.merchant.accounts.v1beta.CreateRegionRequest) private static final com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest(); } public static com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CreateRegionRequest> PARSER = new com.google.protobuf.AbstractParser<CreateRegionRequest>() { @java.lang.Override public CreateRegionRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CreateRegionRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateRegionRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.shopping.merchant.accounts.v1beta.CreateRegionRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,645
java-channel/proto-google-cloud-channel-v1/src/main/java/com/google/cloud/channel/v1/ListCustomersResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/channel/v1/service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.channel.v1; /** * * * <pre> * Response message for * [CloudChannelService.ListCustomers][google.cloud.channel.v1.CloudChannelService.ListCustomers]. * </pre> * * Protobuf type {@code google.cloud.channel.v1.ListCustomersResponse} */ public final class ListCustomersResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.channel.v1.ListCustomersResponse) ListCustomersResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListCustomersResponse.newBuilder() to construct. 
private ListCustomersResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListCustomersResponse() { customers_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListCustomersResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.channel.v1.ServiceProto .internal_static_google_cloud_channel_v1_ListCustomersResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.channel.v1.ServiceProto .internal_static_google_cloud_channel_v1_ListCustomersResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.channel.v1.ListCustomersResponse.class, com.google.cloud.channel.v1.ListCustomersResponse.Builder.class); } public static final int CUSTOMERS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.channel.v1.Customer> customers_; /** * * * <pre> * The customers belonging to a reseller or distributor. * </pre> * * <code>repeated .google.cloud.channel.v1.Customer customers = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.channel.v1.Customer> getCustomersList() { return customers_; } /** * * * <pre> * The customers belonging to a reseller or distributor. * </pre> * * <code>repeated .google.cloud.channel.v1.Customer customers = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.channel.v1.CustomerOrBuilder> getCustomersOrBuilderList() { return customers_; } /** * * * <pre> * The customers belonging to a reseller or distributor. 
* </pre> * * <code>repeated .google.cloud.channel.v1.Customer customers = 1;</code> */ @java.lang.Override public int getCustomersCount() { return customers_.size(); } /** * * * <pre> * The customers belonging to a reseller or distributor. * </pre> * * <code>repeated .google.cloud.channel.v1.Customer customers = 1;</code> */ @java.lang.Override public com.google.cloud.channel.v1.Customer getCustomers(int index) { return customers_.get(index); } /** * * * <pre> * The customers belonging to a reseller or distributor. * </pre> * * <code>repeated .google.cloud.channel.v1.Customer customers = 1;</code> */ @java.lang.Override public com.google.cloud.channel.v1.CustomerOrBuilder getCustomersOrBuilder(int index) { return customers_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to retrieve the next page of results. * Pass to * [ListCustomersRequest.page_token][google.cloud.channel.v1.ListCustomersRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token to retrieve the next page of results. * Pass to * [ListCustomersRequest.page_token][google.cloud.channel.v1.ListCustomersRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < customers_.size(); i++) { output.writeMessage(1, customers_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < customers_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, customers_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.channel.v1.ListCustomersResponse)) { return super.equals(obj); } com.google.cloud.channel.v1.ListCustomersResponse other = (com.google.cloud.channel.v1.ListCustomersResponse) obj; if (!getCustomersList().equals(other.getCustomersList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getCustomersCount() > 0) { hash = (37 * hash) + CUSTOMERS_FIELD_NUMBER; hash = (53 * hash) + getCustomersList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.channel.v1.ListCustomersResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.channel.v1.ListCustomersResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.channel.v1.ListCustomersResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.channel.v1.ListCustomersResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.channel.v1.ListCustomersResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.channel.v1.ListCustomersResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.channel.v1.ListCustomersResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.channel.v1.ListCustomersResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.channel.v1.ListCustomersResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.channel.v1.ListCustomersResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.channel.v1.ListCustomersResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.channel.v1.ListCustomersResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.channel.v1.ListCustomersResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for * [CloudChannelService.ListCustomers][google.cloud.channel.v1.CloudChannelService.ListCustomers]. * </pre> * * Protobuf type {@code google.cloud.channel.v1.ListCustomersResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.channel.v1.ListCustomersResponse) com.google.cloud.channel.v1.ListCustomersResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.channel.v1.ServiceProto .internal_static_google_cloud_channel_v1_ListCustomersResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.channel.v1.ServiceProto .internal_static_google_cloud_channel_v1_ListCustomersResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.channel.v1.ListCustomersResponse.class, com.google.cloud.channel.v1.ListCustomersResponse.Builder.class); } // Construct using com.google.cloud.channel.v1.ListCustomersResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (customersBuilder_ == null) { customers_ = java.util.Collections.emptyList(); } else { customers_ = null; customersBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.channel.v1.ServiceProto 
.internal_static_google_cloud_channel_v1_ListCustomersResponse_descriptor; } @java.lang.Override public com.google.cloud.channel.v1.ListCustomersResponse getDefaultInstanceForType() { return com.google.cloud.channel.v1.ListCustomersResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.channel.v1.ListCustomersResponse build() { com.google.cloud.channel.v1.ListCustomersResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.channel.v1.ListCustomersResponse buildPartial() { com.google.cloud.channel.v1.ListCustomersResponse result = new com.google.cloud.channel.v1.ListCustomersResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.channel.v1.ListCustomersResponse result) { if (customersBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { customers_ = java.util.Collections.unmodifiableList(customers_); bitField0_ = (bitField0_ & ~0x00000001); } result.customers_ = customers_; } else { result.customers_ = customersBuilder_.build(); } } private void buildPartial0(com.google.cloud.channel.v1.ListCustomersResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( 
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.channel.v1.ListCustomersResponse) { return mergeFrom((com.google.cloud.channel.v1.ListCustomersResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.channel.v1.ListCustomersResponse other) { if (other == com.google.cloud.channel.v1.ListCustomersResponse.getDefaultInstance()) return this; if (customersBuilder_ == null) { if (!other.customers_.isEmpty()) { if (customers_.isEmpty()) { customers_ = other.customers_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureCustomersIsMutable(); customers_.addAll(other.customers_); } onChanged(); } } else { if (!other.customers_.isEmpty()) { if (customersBuilder_.isEmpty()) { customersBuilder_.dispose(); customersBuilder_ = null; customers_ = other.customers_; bitField0_ = (bitField0_ & ~0x00000001); customersBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getCustomersFieldBuilder() : null; } else { customersBuilder_.addAllMessages(other.customers_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.channel.v1.Customer m = input.readMessage( com.google.cloud.channel.v1.Customer.parser(), extensionRegistry); if (customersBuilder_ == null) { ensureCustomersIsMutable(); customers_.add(m); } else { customersBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.channel.v1.Customer> customers_ = java.util.Collections.emptyList(); private void ensureCustomersIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { customers_ = new java.util.ArrayList<com.google.cloud.channel.v1.Customer>(customers_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.channel.v1.Customer, com.google.cloud.channel.v1.Customer.Builder, com.google.cloud.channel.v1.CustomerOrBuilder> customersBuilder_; /** * * 
* <pre> * The customers belonging to a reseller or distributor. * </pre> * * <code>repeated .google.cloud.channel.v1.Customer customers = 1;</code> */ public java.util.List<com.google.cloud.channel.v1.Customer> getCustomersList() { if (customersBuilder_ == null) { return java.util.Collections.unmodifiableList(customers_); } else { return customersBuilder_.getMessageList(); } } /** * * * <pre> * The customers belonging to a reseller or distributor. * </pre> * * <code>repeated .google.cloud.channel.v1.Customer customers = 1;</code> */ public int getCustomersCount() { if (customersBuilder_ == null) { return customers_.size(); } else { return customersBuilder_.getCount(); } } /** * * * <pre> * The customers belonging to a reseller or distributor. * </pre> * * <code>repeated .google.cloud.channel.v1.Customer customers = 1;</code> */ public com.google.cloud.channel.v1.Customer getCustomers(int index) { if (customersBuilder_ == null) { return customers_.get(index); } else { return customersBuilder_.getMessage(index); } } /** * * * <pre> * The customers belonging to a reseller or distributor. * </pre> * * <code>repeated .google.cloud.channel.v1.Customer customers = 1;</code> */ public Builder setCustomers(int index, com.google.cloud.channel.v1.Customer value) { if (customersBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCustomersIsMutable(); customers_.set(index, value); onChanged(); } else { customersBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The customers belonging to a reseller or distributor. 
* </pre> * * <code>repeated .google.cloud.channel.v1.Customer customers = 1;</code> */ public Builder setCustomers( int index, com.google.cloud.channel.v1.Customer.Builder builderForValue) { if (customersBuilder_ == null) { ensureCustomersIsMutable(); customers_.set(index, builderForValue.build()); onChanged(); } else { customersBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The customers belonging to a reseller or distributor. * </pre> * * <code>repeated .google.cloud.channel.v1.Customer customers = 1;</code> */ public Builder addCustomers(com.google.cloud.channel.v1.Customer value) { if (customersBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCustomersIsMutable(); customers_.add(value); onChanged(); } else { customersBuilder_.addMessage(value); } return this; } /** * * * <pre> * The customers belonging to a reseller or distributor. * </pre> * * <code>repeated .google.cloud.channel.v1.Customer customers = 1;</code> */ public Builder addCustomers(int index, com.google.cloud.channel.v1.Customer value) { if (customersBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCustomersIsMutable(); customers_.add(index, value); onChanged(); } else { customersBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The customers belonging to a reseller or distributor. * </pre> * * <code>repeated .google.cloud.channel.v1.Customer customers = 1;</code> */ public Builder addCustomers(com.google.cloud.channel.v1.Customer.Builder builderForValue) { if (customersBuilder_ == null) { ensureCustomersIsMutable(); customers_.add(builderForValue.build()); onChanged(); } else { customersBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The customers belonging to a reseller or distributor. 
* </pre> * * <code>repeated .google.cloud.channel.v1.Customer customers = 1;</code> */ public Builder addCustomers( int index, com.google.cloud.channel.v1.Customer.Builder builderForValue) { if (customersBuilder_ == null) { ensureCustomersIsMutable(); customers_.add(index, builderForValue.build()); onChanged(); } else { customersBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The customers belonging to a reseller or distributor. * </pre> * * <code>repeated .google.cloud.channel.v1.Customer customers = 1;</code> */ public Builder addAllCustomers( java.lang.Iterable<? extends com.google.cloud.channel.v1.Customer> values) { if (customersBuilder_ == null) { ensureCustomersIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, customers_); onChanged(); } else { customersBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The customers belonging to a reseller or distributor. * </pre> * * <code>repeated .google.cloud.channel.v1.Customer customers = 1;</code> */ public Builder clearCustomers() { if (customersBuilder_ == null) { customers_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { customersBuilder_.clear(); } return this; } /** * * * <pre> * The customers belonging to a reseller or distributor. * </pre> * * <code>repeated .google.cloud.channel.v1.Customer customers = 1;</code> */ public Builder removeCustomers(int index) { if (customersBuilder_ == null) { ensureCustomersIsMutable(); customers_.remove(index); onChanged(); } else { customersBuilder_.remove(index); } return this; } /** * * * <pre> * The customers belonging to a reseller or distributor. 
* </pre> * * <code>repeated .google.cloud.channel.v1.Customer customers = 1;</code> */ public com.google.cloud.channel.v1.Customer.Builder getCustomersBuilder(int index) { return getCustomersFieldBuilder().getBuilder(index); } /** * * * <pre> * The customers belonging to a reseller or distributor. * </pre> * * <code>repeated .google.cloud.channel.v1.Customer customers = 1;</code> */ public com.google.cloud.channel.v1.CustomerOrBuilder getCustomersOrBuilder(int index) { if (customersBuilder_ == null) { return customers_.get(index); } else { return customersBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The customers belonging to a reseller or distributor. * </pre> * * <code>repeated .google.cloud.channel.v1.Customer customers = 1;</code> */ public java.util.List<? extends com.google.cloud.channel.v1.CustomerOrBuilder> getCustomersOrBuilderList() { if (customersBuilder_ != null) { return customersBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(customers_); } } /** * * * <pre> * The customers belonging to a reseller or distributor. * </pre> * * <code>repeated .google.cloud.channel.v1.Customer customers = 1;</code> */ public com.google.cloud.channel.v1.Customer.Builder addCustomersBuilder() { return getCustomersFieldBuilder() .addBuilder(com.google.cloud.channel.v1.Customer.getDefaultInstance()); } /** * * * <pre> * The customers belonging to a reseller or distributor. * </pre> * * <code>repeated .google.cloud.channel.v1.Customer customers = 1;</code> */ public com.google.cloud.channel.v1.Customer.Builder addCustomersBuilder(int index) { return getCustomersFieldBuilder() .addBuilder(index, com.google.cloud.channel.v1.Customer.getDefaultInstance()); } /** * * * <pre> * The customers belonging to a reseller or distributor. 
* </pre> * * <code>repeated .google.cloud.channel.v1.Customer customers = 1;</code> */ public java.util.List<com.google.cloud.channel.v1.Customer.Builder> getCustomersBuilderList() { return getCustomersFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.channel.v1.Customer, com.google.cloud.channel.v1.Customer.Builder, com.google.cloud.channel.v1.CustomerOrBuilder> getCustomersFieldBuilder() { if (customersBuilder_ == null) { customersBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.channel.v1.Customer, com.google.cloud.channel.v1.Customer.Builder, com.google.cloud.channel.v1.CustomerOrBuilder>( customers_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); customers_ = null; } return customersBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to retrieve the next page of results. * Pass to * [ListCustomersRequest.page_token][google.cloud.channel.v1.ListCustomersRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token to retrieve the next page of results. * Pass to * [ListCustomersRequest.page_token][google.cloud.channel.v1.ListCustomersRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token to retrieve the next page of results. * Pass to * [ListCustomersRequest.page_token][google.cloud.channel.v1.ListCustomersRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token to retrieve the next page of results. * Pass to * [ListCustomersRequest.page_token][google.cloud.channel.v1.ListCustomersRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token to retrieve the next page of results. * Pass to * [ListCustomersRequest.page_token][google.cloud.channel.v1.ListCustomersRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.channel.v1.ListCustomersResponse) } // @@protoc_insertion_point(class_scope:google.cloud.channel.v1.ListCustomersResponse) private static final com.google.cloud.channel.v1.ListCustomersResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.channel.v1.ListCustomersResponse(); } public static com.google.cloud.channel.v1.ListCustomersResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListCustomersResponse> PARSER = new com.google.protobuf.AbstractParser<ListCustomersResponse>() { @java.lang.Override public ListCustomersResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static 
com.google.protobuf.Parser<ListCustomersResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListCustomersResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.channel.v1.ListCustomersResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
google-wallet/rest-samples
36,542
java/src/main/java/com/google/developers/wallet/rest/DemoGiftCard.java
/*
 * Copyright 2022 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.developers.wallet.rest;

// [START setup]
// [START imports]
import com.auth0.jwt.JWT;
import com.auth0.jwt.algorithms.Algorithm;
import com.google.api.client.googleapis.batch.BatchRequest;
import com.google.api.client.googleapis.batch.json.JsonBatchCallback;
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
import com.google.api.client.googleapis.json.GoogleJsonError;
import com.google.api.client.googleapis.json.GoogleJsonResponseException;
import com.google.api.client.http.*;
import com.google.api.client.json.gson.GsonFactory;
import com.google.api.services.walletobjects.*;
import com.google.api.services.walletobjects.model.*;
import com.google.auth.http.HttpCredentialsAdapter;
import com.google.auth.oauth2.GoogleCredentials;
import com.google.auth.oauth2.ServiceAccountCredentials;
import java.io.*;
import java.security.interfaces.RSAPrivateKey;
import java.util.*;
// [END imports]

/** Demo class for creating and managing Gift cards in Google Wallet. */
public class DemoGiftCard {
  /**
   * Path to service account key file from Google Cloud Console. Environment variable:
   * GOOGLE_APPLICATION_CREDENTIALS.
   */
  public static String keyFilePath;

  /** Service account credentials for Google Wallet APIs. */
  public static GoogleCredentials credentials;

  /** Google Wallet service client. */
  public static Walletobjects service;

  /**
   * Construct the demo, resolving the key file path from the environment (falling back to a
   * placeholder path) and authenticating the Wallet API client.
   *
   * @throws Exception If the credentials cannot be loaded or the HTTP transport cannot be created.
   */
  public DemoGiftCard() throws Exception {
    keyFilePath =
        System.getenv().getOrDefault("GOOGLE_APPLICATION_CREDENTIALS", "/path/to/key.json");

    auth();
  }
  // [END setup]

  // [START auth]
  /**
   * Create authenticated HTTP client using a service account file.
   *
   * @throws Exception If the key file cannot be read or the trusted transport cannot be built.
   */
  public void auth() throws Exception {
    credentials =
        GoogleCredentials.fromStream(new FileInputStream(keyFilePath))
            .createScoped(List.of(WalletobjectsScopes.WALLET_OBJECT_ISSUER));
    credentials.refresh();

    HttpTransport httpTransport = GoogleNetHttpTransport.newTrustedTransport();

    // Initialize Google Wallet API service
    service =
        new Walletobjects.Builder(
                httpTransport,
                GsonFactory.getDefaultInstance(),
                new HttpCredentialsAdapter(credentials))
            .setApplicationName("APPLICATION_NAME")
            .build();
  }
  // [END auth]

  // [START createClass]
  /**
   * Create a class.
   *
   * @param issuerId The issuer ID being used for this request.
   * @param classSuffix Developer-defined unique ID for this pass class.
   * @return The pass class ID: "{issuerId}.{classSuffix}"
   * @throws IOException If a Wallet API request fails at the transport level.
   */
  public String createClass(String issuerId, String classSuffix) throws IOException {
    // Check if the class exists
    try {
      service.giftcardclass().get(String.format("%s.%s", issuerId, classSuffix)).execute();

      System.out.printf("Class %s.%s already exists!%n", issuerId, classSuffix);
      return String.format("%s.%s", issuerId, classSuffix);
    } catch (GoogleJsonResponseException ex) {
      if (ex.getStatusCode() != 404) {
        // Something else went wrong...
        // (Sample code intentionally logs and returns rather than rethrowing.)
        ex.printStackTrace();
        return String.format("%s.%s", issuerId, classSuffix);
      }
    }

    // See link below for more information on required properties
    // https://developers.google.com/wallet/retail/gift-cards/rest/v1/giftcardclass
    GiftCardClass newClass =
        new GiftCardClass()
            .setId(String.format("%s.%s", issuerId, classSuffix))
            .setIssuerName("Issuer name")
            .setReviewStatus("UNDER_REVIEW");

    GiftCardClass response = service.giftcardclass().insert(newClass).execute();

    System.out.println("Class insert response");
    System.out.println(response.toPrettyString());

    return response.getId();
  }
  // [END createClass]

  // [START updateClass]
  /**
   * Update a class.
   *
   * <p><strong>Warning:</strong> This replaces all existing class attributes!
   *
   * @param issuerId The issuer ID being used for this request.
   * @param classSuffix Developer-defined unique ID for this pass class.
   * @return The pass class ID: "{issuerId}.{classSuffix}"
   * @throws IOException If a Wallet API request fails at the transport level.
   */
  public String updateClass(String issuerId, String classSuffix) throws IOException {
    GiftCardClass updatedClass;

    // Check if the class exists
    try {
      updatedClass =
          service.giftcardclass().get(String.format("%s.%s", issuerId, classSuffix)).execute();
    } catch (GoogleJsonResponseException ex) {
      if (ex.getStatusCode() == 404) {
        // Class does not exist
        System.out.printf("Class %s.%s not found!%n", issuerId, classSuffix);
        return String.format("%s.%s", issuerId, classSuffix);
      } else {
        // Something else went wrong...
        ex.printStackTrace();
        return String.format("%s.%s", issuerId, classSuffix);
      }
    }

    // Class exists
    // Update the class by adding a homepage
    updatedClass.setHomepageUri(
        new Uri()
            .setUri("https://developers.google.com/wallet")
            .setDescription("Homepage description"));

    // Note: reviewStatus must be 'UNDER_REVIEW' or 'DRAFT' for updates
    updatedClass.setReviewStatus("UNDER_REVIEW");

    GiftCardClass response =
        service
            .giftcardclass()
            .update(String.format("%s.%s", issuerId, classSuffix), updatedClass)
            .execute();

    System.out.println("Class update response");
    System.out.println(response.toPrettyString());

    return response.getId();
  }
  // [END updateClass]

  // [START patchClass]
  /**
   * Patch a class.
   *
   * <p>The PATCH method supports patch semantics.
   *
   * @param issuerId The issuer ID being used for this request.
   * @param classSuffix Developer-defined unique ID for this pass class.
   * @return The pass class ID: "{issuerId}.{classSuffix}"
   * @throws IOException If a Wallet API request fails at the transport level.
   */
  public String patchClass(String issuerId, String classSuffix) throws IOException {
    // Check if the class exists
    try {
      service.giftcardclass().get(String.format("%s.%s", issuerId, classSuffix)).execute();
    } catch (GoogleJsonResponseException ex) {
      if (ex.getStatusCode() == 404) {
        // Class does not exist
        System.out.printf("Class %s.%s not found!%n", issuerId, classSuffix);
        return String.format("%s.%s", issuerId, classSuffix);
      } else {
        // Something else went wrong...
        ex.printStackTrace();
        return String.format("%s.%s", issuerId, classSuffix);
      }
    }

    // Class exists
    // Patch the class by adding a homepage
    GiftCardClass patchBody =
        new GiftCardClass()
            .setHomepageUri(
                new Uri()
                    .setUri("https://developers.google.com/wallet")
                    .setDescription("Homepage description"))

            // Note: reviewStatus must be 'UNDER_REVIEW' or 'DRAFT' for updates
            .setReviewStatus("UNDER_REVIEW");

    GiftCardClass response =
        service
            .giftcardclass()
            .patch(String.format("%s.%s", issuerId, classSuffix), patchBody)
            .execute();

    System.out.println("Class patch response");
    System.out.println(response.toPrettyString());

    return response.getId();
  }
  // [END patchClass]

  // [START addMessageClass]
  /**
   * Add a message to a pass class.
   *
   * @param issuerId The issuer ID being used for this request.
   * @param classSuffix Developer-defined unique ID for this pass class.
   * @param header The message header.
   * @param body The message body.
   * @return The pass class ID: "{issuerId}.{classSuffix}"
   * @throws IOException If a Wallet API request fails at the transport level.
   */
  public String addClassMessage(String issuerId, String classSuffix, String header, String body)
      throws IOException {
    // Check if the class exists
    try {
      service.giftcardclass().get(String.format("%s.%s", issuerId, classSuffix)).execute();
    } catch (GoogleJsonResponseException ex) {
      if (ex.getStatusCode() == 404) {
        // Class does not exist
        System.out.printf("Class %s.%s not found!%n", issuerId, classSuffix);
        return String.format("%s.%s", issuerId, classSuffix);
      } else {
        // Something else went wrong...
        ex.printStackTrace();
        return String.format("%s.%s", issuerId, classSuffix);
      }
    }

    AddMessageRequest message =
        new AddMessageRequest().setMessage(new Message().setHeader(header).setBody(body));

    GiftCardClassAddMessageResponse response =
        service
            .giftcardclass()
            .addmessage(String.format("%s.%s", issuerId, classSuffix), message)
            .execute();

    System.out.println("Class addMessage response");
    System.out.println(response.toPrettyString());

    return String.format("%s.%s", issuerId, classSuffix);
  }
  // [END addMessageClass]

  // [START createObject]
  /**
   * Create an object.
   *
   * @param issuerId The issuer ID being used for this request.
   * @param classSuffix Developer-defined unique ID for this pass class.
   * @param objectSuffix Developer-defined unique ID for this pass object.
   * @return The pass object ID: "{issuerId}.{objectSuffix}"
   * @throws IOException If a Wallet API request fails at the transport level.
   */
  public String createObject(String issuerId, String classSuffix, String objectSuffix)
      throws IOException {
    // Check if the object exists
    try {
      service.giftcardobject().get(String.format("%s.%s", issuerId, objectSuffix)).execute();

      System.out.printf("Object %s.%s already exists!%n", issuerId, objectSuffix);
      return String.format("%s.%s", issuerId, objectSuffix);
    } catch (GoogleJsonResponseException ex) {
      if (ex.getStatusCode() != 404) {
        // Something else went wrong...
        ex.printStackTrace();
        return String.format("%s.%s", issuerId, objectSuffix);
      }
    }

    // See link below for more information on required properties
    // https://developers.google.com/wallet/retail/gift-cards/rest/v1/giftcardobject
    GiftCardObject newObject =
        new GiftCardObject()
            .setId(String.format("%s.%s", issuerId, objectSuffix))
            .setClassId(String.format("%s.%s", issuerId, classSuffix))
            .setState("ACTIVE")
            .setHeroImage(
                new Image()
                    .setSourceUri(
                        new ImageUri()
                            .setUri(
                                "https://farm4.staticflickr.com/3723/11177041115_6e6a3b6f49_o.jpg"))
                    .setContentDescription(
                        new LocalizedString()
                            .setDefaultValue(
                                new TranslatedString()
                                    .setLanguage("en-US")
                                    .setValue("Hero image description"))))
            .setTextModulesData(
                List.of(
                    new TextModuleData()
                        .setHeader("Text module header")
                        .setBody("Text module body")
                        .setId("TEXT_MODULE_ID")))
            .setLinksModuleData(
                new LinksModuleData()
                    .setUris(
                        Arrays.asList(
                            new Uri()
                                .setUri("http://maps.google.com/")
                                .setDescription("Link module URI description")
                                .setId("LINK_MODULE_URI_ID"),
                            new Uri()
                                .setUri("tel:6505555555")
                                .setDescription("Link module tel description")
                                .setId("LINK_MODULE_TEL_ID"))))
            .setImageModulesData(
                List.of(
                    new ImageModuleData()
                        .setMainImage(
                            new Image()
                                .setSourceUri(
                                    new ImageUri()
                                        .setUri(
                                            "http://farm4.staticflickr.com/3738/12440799783_3dc3c20606_b.jpg"))
                                .setContentDescription(
                                    new LocalizedString()
                                        .setDefaultValue(
                                            new TranslatedString()
                                                .setLanguage("en-US")
                                                .setValue("Image module description"))))
                        .setId("IMAGE_MODULE_ID")))
            .setBarcode(new Barcode().setType("QR_CODE").setValue("QR code value"))
            .setLocations(
                List.of(
                    new LatLongPoint()
                        .setLatitude(37.424015499999996)
                        .setLongitude(-122.09259560000001)))
            .setCardNumber("Card number")
            .setPin("1234")
            .setBalance(new Money().setMicros(20000000L).setCurrencyCode("USD"))
            .setBalanceUpdateTime(new DateTime().setDate("2020-04-12T16:20:50.52-04:00"));

    GiftCardObject response = service.giftcardobject().insert(newObject).execute();

    System.out.println("Object insert response");
    System.out.println(response.toPrettyString());

    return response.getId();
  }
  // [END createObject]

  // [START updateObject]
  /**
   * Update an object.
   *
   * <p><strong>Warning:</strong> This replaces all existing object attributes!
   *
   * @param issuerId The issuer ID being used for this request.
   * @param objectSuffix Developer-defined unique ID for this pass object.
   * @return The pass object ID: "{issuerId}.{objectSuffix}"
   * @throws IOException If a Wallet API request fails at the transport level.
   */
  public String updateObject(String issuerId, String objectSuffix) throws IOException {
    GiftCardObject updatedObject;

    // Check if the object exists
    try {
      updatedObject =
          service.giftcardobject().get(String.format("%s.%s", issuerId, objectSuffix)).execute();
    } catch (GoogleJsonResponseException ex) {
      if (ex.getStatusCode() == 404) {
        // Object does not exist
        System.out.printf("Object %s.%s not found!%n", issuerId, objectSuffix);
        return String.format("%s.%s", issuerId, objectSuffix);
      } else {
        // Something else went wrong...
        ex.printStackTrace();
        return String.format("%s.%s", issuerId, objectSuffix);
      }
    }

    // Object exists
    // Update the object by adding a link
    Uri newLink =
        new Uri()
            .setUri("https://developers.google.com/wallet")
            .setDescription("New link description");
    if (updatedObject.getLinksModuleData() == null) {
      // LinksModuleData was not set on the original object
      updatedObject.setLinksModuleData(new LinksModuleData().setUris(List.of(newLink)));
    } else {
      updatedObject.getLinksModuleData().getUris().add(newLink);
    }

    GiftCardObject response =
        service
            .giftcardobject()
            .update(String.format("%s.%s", issuerId, objectSuffix), updatedObject)
            .execute();

    System.out.println("Object update response");
    System.out.println(response.toPrettyString());

    return response.getId();
  }
  // [END updateObject]

  // [START patchObject]
  /**
   * Patch an object.
   *
   * @param issuerId The issuer ID being used for this request.
   * @param objectSuffix Developer-defined unique ID for this pass object.
   * @return The pass object ID: "{issuerId}.{objectSuffix}"
   * @throws IOException If a Wallet API request fails at the transport level.
   */
  public String patchObject(String issuerId, String objectSuffix) throws IOException {
    GiftCardObject existingObject;

    // Check if the object exists
    try {
      existingObject =
          service.giftcardobject().get(String.format("%s.%s", issuerId, objectSuffix)).execute();
    } catch (GoogleJsonResponseException ex) {
      if (ex.getStatusCode() == 404) {
        // Object does not exist
        System.out.printf("Object %s.%s not found!%n", issuerId, objectSuffix);
        return String.format("%s.%s", issuerId, objectSuffix);
      } else {
        // Something else went wrong...
        ex.printStackTrace();
        return String.format("%s.%s", issuerId, objectSuffix);
      }
    }

    // Object exists
    // Patch the object by adding a link
    Uri newLink =
        new Uri()
            .setUri("https://developers.google.com/wallet")
            .setDescription("New link description");

    GiftCardObject patchBody = new GiftCardObject();
    if (existingObject.getLinksModuleData() == null) {
      // LinksModuleData was not set on the original object
      patchBody.setLinksModuleData(new LinksModuleData().setUris(new ArrayList<Uri>()));
    } else {
      patchBody.setLinksModuleData(existingObject.getLinksModuleData());
    }
    patchBody.getLinksModuleData().getUris().add(newLink);

    GiftCardObject response =
        service
            .giftcardobject()
            .patch(String.format("%s.%s", issuerId, objectSuffix), patchBody)
            .execute();

    System.out.println("Object patch response");
    System.out.println(response.toPrettyString());

    return response.getId();
  }
  // [END patchObject]

  // [START expireObject]
  /**
   * Expire an object.
   *
   * <p>Sets the object's state to Expired. If the valid time interval is already set, the pass will
   * expire automatically up to 24 hours after.
   *
   * @param issuerId The issuer ID being used for this request.
   * @param objectSuffix Developer-defined unique ID for this pass object.
   * @return The pass object ID: "{issuerId}.{objectSuffix}"
   * @throws IOException If a Wallet API request fails at the transport level.
   */
  public String expireObject(String issuerId, String objectSuffix) throws IOException {
    // Check if the object exists
    try {
      service.giftcardobject().get(String.format("%s.%s", issuerId, objectSuffix)).execute();
    } catch (GoogleJsonResponseException ex) {
      if (ex.getStatusCode() == 404) {
        // Object does not exist
        System.out.printf("Object %s.%s not found!%n", issuerId, objectSuffix);
        return String.format("%s.%s", issuerId, objectSuffix);
      } else {
        // Something else went wrong...
        ex.printStackTrace();
        return String.format("%s.%s", issuerId, objectSuffix);
      }
    }

    // Patch the object, setting the pass as expired
    GiftCardObject patchBody = new GiftCardObject().setState("EXPIRED");

    GiftCardObject response =
        service
            .giftcardobject()
            .patch(String.format("%s.%s", issuerId, objectSuffix), patchBody)
            .execute();

    System.out.println("Object expiration response");
    System.out.println(response.toPrettyString());

    return response.getId();
  }
  // [END expireObject]

  // [START addMessageObject]
  /**
   * Add a message to a pass object.
   *
   * @param issuerId The issuer ID being used for this request.
   * @param objectSuffix Developer-defined unique ID for this pass object.
   * @param header The message header.
   * @param body The message body.
   * @return The pass object ID: "{issuerId}.{objectSuffix}"
   * @throws IOException If a Wallet API request fails at the transport level.
   */
  public String addObjectMessage(String issuerId, String objectSuffix, String header, String body)
      throws IOException {
    // Check if the object exists
    try {
      service.giftcardobject().get(String.format("%s.%s", issuerId, objectSuffix)).execute();
    } catch (GoogleJsonResponseException ex) {
      if (ex.getStatusCode() == 404) {
        // Object does not exist
        System.out.printf("Object %s.%s not found!%n", issuerId, objectSuffix);
        return String.format("%s.%s", issuerId, objectSuffix);
      } else {
        // Something else went wrong...
        ex.printStackTrace();
        return String.format("%s.%s", issuerId, objectSuffix);
      }
    }

    AddMessageRequest message =
        new AddMessageRequest().setMessage(new Message().setHeader(header).setBody(body));

    GiftCardObjectAddMessageResponse response =
        service
            .giftcardobject()
            .addmessage(String.format("%s.%s", issuerId, objectSuffix), message)
            .execute();

    System.out.println("Object addMessage response");
    System.out.println(response.toPrettyString());

    return String.format("%s.%s", issuerId, objectSuffix);
  }
  // [END addMessageObject]

  // [START jwtNew]
  /**
   * Generate a signed JWT that creates a new pass class and object.
   *
   * <p>When the user opens the "Add to Google Wallet" URL and saves the pass to their wallet, the
   * pass class and object defined in the JWT are created. This allows you to create multiple pass
   * classes and objects in one API call when the user saves the pass to their wallet.
   *
   * @param issuerId The issuer ID being used for this request.
   * @param classSuffix Developer-defined unique ID for this pass class.
   * @param objectSuffix Developer-defined unique ID for the pass object.
   * @return An "Add to Google Wallet" link.
   */
  public String createJWTNewObjects(String issuerId, String classSuffix, String objectSuffix) {
    // See link below for more information on required properties
    // https://developers.google.com/wallet/retail/gift-cards/rest/v1/giftcardclass
    GiftCardClass newClass =
        new GiftCardClass()
            .setId(String.format("%s.%s", issuerId, classSuffix))
            .setIssuerName("Issuer name")
            .setReviewStatus("UNDER_REVIEW");

    // See link below for more information on required properties
    // https://developers.google.com/wallet/retail/gift-cards/rest/v1/giftcardobject
    GiftCardObject newObject =
        new GiftCardObject()
            .setId(String.format("%s.%s", issuerId, objectSuffix))
            .setClassId(String.format("%s.%s", issuerId, classSuffix))
            .setState("ACTIVE")
            .setHeroImage(
                new Image()
                    .setSourceUri(
                        new ImageUri()
                            .setUri(
                                "https://farm4.staticflickr.com/3723/11177041115_6e6a3b6f49_o.jpg"))
                    .setContentDescription(
                        new LocalizedString()
                            .setDefaultValue(
                                new TranslatedString()
                                    .setLanguage("en-US")
                                    .setValue("Hero image description"))))
            .setTextModulesData(
                List.of(
                    new TextModuleData()
                        .setHeader("Text module header")
                        .setBody("Text module body")
                        .setId("TEXT_MODULE_ID")))
            .setLinksModuleData(
                new LinksModuleData()
                    .setUris(
                        Arrays.asList(
                            new Uri()
                                .setUri("http://maps.google.com/")
                                .setDescription("Link module URI description")
                                .setId("LINK_MODULE_URI_ID"),
                            new Uri()
                                .setUri("tel:6505555555")
                                .setDescription("Link module tel description")
                                .setId("LINK_MODULE_TEL_ID"))))
            .setImageModulesData(
                List.of(
                    new ImageModuleData()
                        .setMainImage(
                            new Image()
                                .setSourceUri(
                                    new ImageUri()
                                        .setUri(
                                            "http://farm4.staticflickr.com/3738/12440799783_3dc3c20606_b.jpg"))
                                .setContentDescription(
                                    new LocalizedString()
                                        .setDefaultValue(
                                            new TranslatedString()
                                                .setLanguage("en-US")
                                                .setValue("Image module description"))))
                        .setId("IMAGE_MODULE_ID")))
            .setBarcode(new Barcode().setType("QR_CODE").setValue("QR code value"))
            .setLocations(
                List.of(
                    new LatLongPoint()
                        .setLatitude(37.424015499999996)
                        .setLongitude(-122.09259560000001)))
            .setCardNumber("Card number")
            .setPin("1234")
            .setBalance(new Money().setMicros(20000000L).setCurrencyCode("USD"))
            .setBalanceUpdateTime(new DateTime().setDate("2020-04-12T16:20:50.52-04:00"));

    // Create the JWT as a HashMap object
    HashMap<String, Object> claims = new HashMap<String, Object>();
    claims.put("iss", ((ServiceAccountCredentials) credentials).getClientEmail());
    claims.put("aud", "google");
    claims.put("origins", List.of("www.example.com"));
    claims.put("typ", "savetowallet");

    // Create the Google Wallet payload and add to the JWT
    HashMap<String, Object> payload = new HashMap<String, Object>();
    payload.put("giftCardClasses", List.of(newClass));
    payload.put("giftCardObjects", List.of(newObject));
    claims.put("payload", payload);

    // The service account credentials are used to sign the JWT
    Algorithm algorithm =
        Algorithm.RSA256(
            null, (RSAPrivateKey) ((ServiceAccountCredentials) credentials).getPrivateKey());
    String token = JWT.create().withPayload(claims).sign(algorithm);

    System.out.println("Add to Google Wallet link");
    System.out.printf("https://pay.google.com/gp/v/save/%s%n", token);

    return String.format("https://pay.google.com/gp/v/save/%s", token);
  }
  // [END jwtNew]

  // [START jwtExisting]
  /**
   * Generate a signed JWT that references an existing pass object.
   *
   * <p>When the user opens the "Add to Google Wallet" URL and saves the pass to their wallet, the
   * pass objects defined in the JWT are added to the user's Google Wallet app. This allows the user
   * to save multiple pass objects in one API call.
   *
   * <p>The objects to add must follow the below format:
   *
   * <p>{ 'id': 'ISSUER_ID.OBJECT_SUFFIX', 'classId': 'ISSUER_ID.CLASS_SUFFIX' }
   *
   * @param issuerId The issuer ID being used for this request.
   * @return An "Add to Google Wallet" link.
   */
  public String createJWTExistingObjects(String issuerId) {
    // Multiple pass types can be added at the same time
    // At least one type must be specified in the JWT claims
    // Note: Make sure to replace the placeholder class and object suffixes
    HashMap<String, Object> objectsToAdd = new HashMap<String, Object>();

    // Event tickets
    objectsToAdd.put(
        "eventTicketObjects",
        List.of(
            new EventTicketObject()
                .setId(String.format("%s.%s", issuerId, "EVENT_OBJECT_SUFFIX"))
                .setClassId(String.format("%s.%s", issuerId, "EVENT_CLASS_SUFFIX"))));

    // Boarding passes
    objectsToAdd.put(
        "flightObjects",
        List.of(
            new FlightObject()
                .setId(String.format("%s.%s", issuerId, "FLIGHT_OBJECT_SUFFIX"))
                .setClassId(String.format("%s.%s", issuerId, "FLIGHT_CLASS_SUFFIX"))));

    // Generic passes
    objectsToAdd.put(
        "genericObjects",
        List.of(
            new GenericObject()
                .setId(String.format("%s.%s", issuerId, "GENERIC_OBJECT_SUFFIX"))
                .setClassId(String.format("%s.%s", issuerId, "GENERIC_CLASS_SUFFIX"))));

    // Gift cards
    objectsToAdd.put(
        "giftCardObjects",
        List.of(
            new GiftCardObject()
                .setId(String.format("%s.%s", issuerId, "GIFT_CARD_OBJECT_SUFFIX"))
                .setClassId(String.format("%s.%s", issuerId, "GIFT_CARD_CLASS_SUFFIX"))));

    // Loyalty cards
    objectsToAdd.put(
        "loyaltyObjects",
        List.of(
            new LoyaltyObject()
                .setId(String.format("%s.%s", issuerId, "LOYALTY_OBJECT_SUFFIX"))
                .setClassId(String.format("%s.%s", issuerId, "LOYALTY_CLASS_SUFFIX"))));

    // Offers
    objectsToAdd.put(
        "offerObjects",
        List.of(
            new OfferObject()
                .setId(String.format("%s.%s", issuerId, "OFFER_OBJECT_SUFFIX"))
                .setClassId(String.format("%s.%s", issuerId, "OFFER_CLASS_SUFFIX"))));

    // Transit passes
    objectsToAdd.put(
        "transitObjects",
        List.of(
            new TransitObject()
                .setId(String.format("%s.%s", issuerId, "TRANSIT_OBJECT_SUFFIX"))
                .setClassId(String.format("%s.%s", issuerId, "TRANSIT_CLASS_SUFFIX"))));

    // Create the JWT as a HashMap object
    HashMap<String, Object> claims = new HashMap<String, Object>();
    claims.put("iss", ((ServiceAccountCredentials) credentials).getClientEmail());
    claims.put("aud", "google");
    claims.put("origins", List.of("www.example.com"));
    claims.put("typ", "savetowallet");
    claims.put("payload", objectsToAdd);

    // The service account credentials are used to sign the JWT
    Algorithm algorithm =
        Algorithm.RSA256(
            null, (RSAPrivateKey) ((ServiceAccountCredentials) credentials).getPrivateKey());
    String token = JWT.create().withPayload(claims).sign(algorithm);

    System.out.println("Add to Google Wallet link");
    System.out.printf("https://pay.google.com/gp/v/save/%s%n", token);

    return String.format("https://pay.google.com/gp/v/save/%s", token);
  }
  // [END jwtExisting]

  // [START batch]
  /**
   * Batch create Google Wallet objects from an existing class.
   *
   * @param issuerId The issuer ID being used for this request.
   * @param classSuffix Developer-defined unique ID for this pass class.
   * @throws IOException If a Wallet API request fails at the transport level.
   */
  public void batchCreateObjects(String issuerId, String classSuffix) throws IOException {
    // Create the batch request client
    BatchRequest batch = service.batch(new HttpCredentialsAdapter(credentials));

    // The callback will be invoked for each request in the batch
    JsonBatchCallback<GiftCardObject> callback =
        new JsonBatchCallback<GiftCardObject>() {
          // Invoked if the request was successful
          public void onSuccess(GiftCardObject response, HttpHeaders responseHeaders) {
            System.out.println("Batch insert response");
            System.out.println(response.toString());
          }

          // Invoked if the request failed
          public void onFailure(GoogleJsonError e, HttpHeaders responseHeaders) {
            System.out.println("Error Message: " + e.getMessage());
          }
        };

    // Example: Generate three new pass objects
    for (int i = 0; i < 3; i++) {
      // Generate a random object suffix
      String objectSuffix = UUID.randomUUID().toString().replaceAll("[^\\w.-]", "_");

      // See link below for more information on required properties
      // https://developers.google.com/wallet/retail/gift-cards/rest/v1/giftcardobject
      GiftCardObject batchObject =
          new GiftCardObject()
              .setId(String.format("%s.%s", issuerId, objectSuffix))
              .setClassId(String.format("%s.%s", issuerId, classSuffix))
              .setState("ACTIVE")
              .setHeroImage(
                  new Image()
                      .setSourceUri(
                          new ImageUri()
                              .setUri(
                                  "https://farm4.staticflickr.com/3723/11177041115_6e6a3b6f49_o.jpg"))
                      .setContentDescription(
                          new LocalizedString()
                              .setDefaultValue(
                                  new TranslatedString()
                                      .setLanguage("en-US")
                                      .setValue("Hero image description"))))
              .setTextModulesData(
                  List.of(
                      new TextModuleData()
                          .setHeader("Text module header")
                          .setBody("Text module body")
                          .setId("TEXT_MODULE_ID")))
              .setLinksModuleData(
                  new LinksModuleData()
                      .setUris(
                          Arrays.asList(
                              new Uri()
                                  .setUri("http://maps.google.com/")
                                  .setDescription("Link module URI description")
                                  .setId("LINK_MODULE_URI_ID"),
                              new Uri()
                                  .setUri("tel:6505555555")
                                  .setDescription("Link module tel description")
                                  .setId("LINK_MODULE_TEL_ID"))))
              .setImageModulesData(
                  List.of(
                      new ImageModuleData()
                          .setMainImage(
                              new Image()
                                  .setSourceUri(
                                      new ImageUri()
                                          .setUri(
                                              "http://farm4.staticflickr.com/3738/12440799783_3dc3c20606_b.jpg"))
                                  .setContentDescription(
                                      new LocalizedString()
                                          .setDefaultValue(
                                              new TranslatedString()
                                                  .setLanguage("en-US")
                                                  .setValue("Image module description"))))
                          .setId("IMAGE_MODULE_ID")))
              .setBarcode(new Barcode().setType("QR_CODE").setValue("QR code value"))
              .setLocations(
                  List.of(
                      new LatLongPoint()
                          .setLatitude(37.424015499999996)
                          .setLongitude(-122.09259560000001)))
              .setCardNumber("Card number")
              .setPin("1234")
              .setBalance(new Money().setMicros(20000000L).setCurrencyCode("USD"))
              .setBalanceUpdateTime(new DateTime().setDate("2020-04-12T16:20:50.52-04:00"));

      service.giftcardobject().insert(batchObject).queue(batch, callback);
    }

    // Invoke the batch API calls
    batch.execute();
  }
  // [END batch]
}
google/j2objc
36,818
jre_emul/android/platform/libcore/ojluni/src/main/java/java/util/stream/DoubleStream.java
/* * Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
*/ package java.util.stream; import java.nio.charset.Charset; import java.nio.file.Files; import java.nio.file.Path; import java.util.Arrays; import java.util.Collection; import java.util.DoubleSummaryStatistics; import java.util.Objects; import java.util.OptionalDouble; import java.util.PrimitiveIterator; import java.util.Spliterator; import java.util.Spliterators; import java.util.concurrent.ConcurrentHashMap; import java.util.function.BiConsumer; import java.util.function.DoubleBinaryOperator; import java.util.function.DoubleConsumer; import java.util.function.DoubleFunction; import java.util.function.DoublePredicate; import java.util.function.DoubleSupplier; import java.util.function.DoubleToIntFunction; import java.util.function.DoubleToLongFunction; import java.util.function.DoubleUnaryOperator; import java.util.function.Function; import java.util.function.ObjDoubleConsumer; import java.util.function.Supplier; /** * A sequence of primitive double-valued elements supporting sequential and parallel * aggregate operations. This is the {@code double} primitive specialization of * {@link Stream}. * * <p>The following example illustrates an aggregate operation using * {@link Stream} and {@link DoubleStream}, computing the sum of the weights of the * red widgets: * * <pre>{@code * double sum = widgets.stream() * .filter(w -> w.getColor() == RED) * .mapToDouble(w -> w.getWeight()) * .sum(); * }</pre> * * See the class documentation for {@link Stream} and the package documentation * for <a href="package-summary.html">java.util.stream</a> for additional * specification of streams, stream operations, stream pipelines, and * parallelism. * * @since 1.8 * @see Stream * @see <a href="package-summary.html">java.util.stream</a> */ public interface DoubleStream extends BaseStream<Double, DoubleStream> { /** * Returns a stream consisting of the elements of this stream that match * the given predicate. 
* * <p>This is an <a href="package-summary.html#StreamOps">intermediate * operation</a>. * * @param predicate a <a href="package-summary.html#NonInterference">non-interfering</a>, * <a href="package-summary.html#Statelessness">stateless</a> * predicate to apply to each element to determine if it * should be included * @return the new stream */ DoubleStream filter(DoublePredicate predicate); /** * Returns a stream consisting of the results of applying the given * function to the elements of this stream. * * <p>This is an <a href="package-summary.html#StreamOps">intermediate * operation</a>. * * @param mapper a <a href="package-summary.html#NonInterference">non-interfering</a>, * <a href="package-summary.html#Statelessness">stateless</a> * function to apply to each element * @return the new stream */ DoubleStream map(DoubleUnaryOperator mapper); /** * Returns an object-valued {@code Stream} consisting of the results of * applying the given function to the elements of this stream. * * <p>This is an <a href="package-summary.html#StreamOps"> * intermediate operation</a>. * * @param <U> the element type of the new stream * @param mapper a <a href="package-summary.html#NonInterference">non-interfering</a>, * <a href="package-summary.html#Statelessness">stateless</a> * function to apply to each element * @return the new stream */ <U> Stream<U> mapToObj(DoubleFunction<? extends U> mapper); /** * Returns an {@code IntStream} consisting of the results of applying the * given function to the elements of this stream. * * <p>This is an <a href="package-summary.html#StreamOps">intermediate * operation</a>. 
* * @param mapper a <a href="package-summary.html#NonInterference">non-interfering</a>, * <a href="package-summary.html#Statelessness">stateless</a> * function to apply to each element * @return the new stream */ IntStream mapToInt(DoubleToIntFunction mapper); /** * Returns a {@code LongStream} consisting of the results of applying the * given function to the elements of this stream. * * <p>This is an <a href="package-summary.html#StreamOps">intermediate * operation</a>. * * @param mapper a <a href="package-summary.html#NonInterference">non-interfering</a>, * <a href="package-summary.html#Statelessness">stateless</a> * function to apply to each element * @return the new stream */ LongStream mapToLong(DoubleToLongFunction mapper); /** * Returns a stream consisting of the results of replacing each element of * this stream with the contents of a mapped stream produced by applying * the provided mapping function to each element. Each mapped stream is * {@link java.util.stream.BaseStream#close() closed} after its contents * have been placed into this stream. (If a mapped stream is {@code null} * an empty stream is used, instead.) * * <p>This is an <a href="package-summary.html#StreamOps">intermediate * operation</a>. * * @param mapper a <a href="package-summary.html#NonInterference">non-interfering</a>, * <a href="package-summary.html#Statelessness">stateless</a> * function to apply to each element which produces a * {@code DoubleStream} of new values * @return the new stream * @see Stream#flatMap(Function) */ DoubleStream flatMap(DoubleFunction<? extends DoubleStream> mapper); /** * Returns a stream consisting of the distinct elements of this stream. The * elements are compared for equality according to * {@link java.lang.Double#compare(double, double)}. * * <p>This is a <a href="package-summary.html#StreamOps">stateful * intermediate operation</a>. 
* * @return the result stream */ DoubleStream distinct(); /** * Returns a stream consisting of the elements of this stream in sorted * order. The elements are compared for equality according to * {@link java.lang.Double#compare(double, double)}. * * <p>This is a <a href="package-summary.html#StreamOps">stateful * intermediate operation</a>. * * @return the result stream */ DoubleStream sorted(); /** * Returns a stream consisting of the elements of this stream, additionally * performing the provided action on each element as elements are consumed * from the resulting stream. * * <p>This is an <a href="package-summary.html#StreamOps">intermediate * operation</a>. * * <p>For parallel stream pipelines, the action may be called at * whatever time and in whatever thread the element is made available by the * upstream operation. If the action modifies shared state, * it is responsible for providing the required synchronization. * * @apiNote This method exists mainly to support debugging, where you want * to see the elements as they flow past a certain point in a pipeline: * <pre>{@code * DoubleStream.of(1, 2, 3, 4) * .filter(e -> e > 2) * .peek(e -> System.out.println("Filtered value: " + e)) * .map(e -> e * e) * .peek(e -> System.out.println("Mapped value: " + e)) * .sum(); * }</pre> * * @param action a <a href="package-summary.html#NonInterference"> * non-interfering</a> action to perform on the elements as * they are consumed from the stream * @return the new stream */ DoubleStream peek(DoubleConsumer action); /** * Returns a stream consisting of the elements of this stream, truncated * to be no longer than {@code maxSize} in length. * * <p>This is a <a href="package-summary.html#StreamOps">short-circuiting * stateful intermediate operation</a>. 
* * @apiNote * While {@code limit()} is generally a cheap operation on sequential * stream pipelines, it can be quite expensive on ordered parallel pipelines, * especially for large values of {@code maxSize}, since {@code limit(n)} * is constrained to return not just any <em>n</em> elements, but the * <em>first n</em> elements in the encounter order. Using an unordered * stream source (such as {@link #generate(DoubleSupplier)}) or removing the * ordering constraint with {@link #unordered()} may result in significant * speedups of {@code limit()} in parallel pipelines, if the semantics of * your situation permit. If consistency with encounter order is required, * and you are experiencing poor performance or memory utilization with * {@code limit()} in parallel pipelines, switching to sequential execution * with {@link #sequential()} may improve performance. * * @param maxSize the number of elements the stream should be limited to * @return the new stream * @throws IllegalArgumentException if {@code maxSize} is negative */ DoubleStream limit(long maxSize); /** * Returns a stream consisting of the remaining elements of this stream * after discarding the first {@code n} elements of the stream. * If this stream contains fewer than {@code n} elements then an * empty stream will be returned. * * <p>This is a <a href="package-summary.html#StreamOps">stateful * intermediate operation</a>. * * @apiNote * While {@code skip()} is generally a cheap operation on sequential * stream pipelines, it can be quite expensive on ordered parallel pipelines, * especially for large values of {@code n}, since {@code skip(n)} * is constrained to skip not just any <em>n</em> elements, but the * <em>first n</em> elements in the encounter order. 
Using an unordered * stream source (such as {@link #generate(DoubleSupplier)}) or removing the * ordering constraint with {@link #unordered()} may result in significant * speedups of {@code skip()} in parallel pipelines, if the semantics of * your situation permit. If consistency with encounter order is required, * and you are experiencing poor performance or memory utilization with * {@code skip()} in parallel pipelines, switching to sequential execution * with {@link #sequential()} may improve performance. * * @param n the number of leading elements to skip * @return the new stream * @throws IllegalArgumentException if {@code n} is negative */ DoubleStream skip(long n); /** * Performs an action for each element of this stream. * * <p>This is a <a href="package-summary.html#StreamOps">terminal * operation</a>. * * <p>For parallel stream pipelines, this operation does <em>not</em> * guarantee to respect the encounter order of the stream, as doing so * would sacrifice the benefit of parallelism. For any given element, the * action may be performed at whatever time and in whatever thread the * library chooses. If the action accesses shared state, it is * responsible for providing the required synchronization. * * @param action a <a href="package-summary.html#NonInterference"> * non-interfering</a> action to perform on the elements */ void forEach(DoubleConsumer action); /** * Performs an action for each element of this stream, guaranteeing that * each element is processed in encounter order for streams that have a * defined encounter order. * * <p>This is a <a href="package-summary.html#StreamOps">terminal * operation</a>. * * @param action a <a href="package-summary.html#NonInterference"> * non-interfering</a> action to perform on the elements * @see #forEach(DoubleConsumer) */ void forEachOrdered(DoubleConsumer action); /** * Returns an array containing the elements of this stream. * * <p>This is a <a href="package-summary.html#StreamOps">terminal * operation</a>. 
* * @return an array containing the elements of this stream */ double[] toArray(); /** * Performs a <a href="package-summary.html#Reduction">reduction</a> on the * elements of this stream, using the provided identity value and an * <a href="package-summary.html#Associativity">associative</a> * accumulation function, and returns the reduced value. This is equivalent * to: * <pre>{@code * double result = identity; * for (double element : this stream) * result = accumulator.applyAsDouble(result, element) * return result; * }</pre> * * but is not constrained to execute sequentially. * * <p>The {@code identity} value must be an identity for the accumulator * function. This means that for all {@code x}, * {@code accumulator.apply(identity, x)} is equal to {@code x}. * The {@code accumulator} function must be an * <a href="package-summary.html#Associativity">associative</a> function. * * <p>This is a <a href="package-summary.html#StreamOps">terminal * operation</a>. * * @apiNote Sum, min, max, and average are all special cases of reduction. * Summing a stream of numbers can be expressed as: * <pre>{@code * double sum = numbers.reduce(0, (a, b) -> a+b); * }</pre> * * or more compactly: * * <pre>{@code * double sum = numbers.reduce(0, Double::sum); * }</pre> * * <p>While this may seem a more roundabout way to perform an aggregation * compared to simply mutating a running total in a loop, reduction * operations parallelize more gracefully, without needing additional * synchronization and with greatly reduced risk of data races. 
* * @param identity the identity value for the accumulating function * @param op an <a href="package-summary.html#Associativity">associative</a>, * <a href="package-summary.html#NonInterference">non-interfering</a>, * <a href="package-summary.html#Statelessness">stateless</a> * function for combining two values * @return the result of the reduction * @see #sum() * @see #min() * @see #max() * @see #average() */ double reduce(double identity, DoubleBinaryOperator op); /** * Performs a <a href="package-summary.html#Reduction">reduction</a> on the * elements of this stream, using an * <a href="package-summary.html#Associativity">associative</a> accumulation * function, and returns an {@code OptionalDouble} describing the reduced * value, if any. This is equivalent to: * <pre>{@code * boolean foundAny = false; * double result = null; * for (double element : this stream) { * if (!foundAny) { * foundAny = true; * result = element; * } * else * result = accumulator.applyAsDouble(result, element); * } * return foundAny ? OptionalDouble.of(result) : OptionalDouble.empty(); * }</pre> * * but is not constrained to execute sequentially. * * <p>The {@code accumulator} function must be an * <a href="package-summary.html#Associativity">associative</a> function. * * <p>This is a <a href="package-summary.html#StreamOps">terminal * operation</a>. * * @param op an <a href="package-summary.html#Associativity">associative</a>, * <a href="package-summary.html#NonInterference">non-interfering</a>, * <a href="package-summary.html#Statelessness">stateless</a> * function for combining two values * @return the result of the reduction * @see #reduce(double, DoubleBinaryOperator) */ OptionalDouble reduce(DoubleBinaryOperator op); /** * Performs a <a href="package-summary.html#MutableReduction">mutable * reduction</a> operation on the elements of this stream. 
A mutable * reduction is one in which the reduced value is a mutable result container, * such as an {@code ArrayList}, and elements are incorporated by updating * the state of the result rather than by replacing the result. This * produces a result equivalent to: * <pre>{@code * R result = supplier.get(); * for (double element : this stream) * accumulator.accept(result, element); * return result; * }</pre> * * <p>Like {@link #reduce(double, DoubleBinaryOperator)}, {@code collect} * operations can be parallelized without requiring additional * synchronization. * * <p>This is a <a href="package-summary.html#StreamOps">terminal * operation</a>. * * @param <R> type of the result * @param supplier a function that creates a new result container. For a * parallel execution, this function may be called * multiple times and must return a fresh value each time. * @param accumulator an <a href="package-summary.html#Associativity">associative</a>, * <a href="package-summary.html#NonInterference">non-interfering</a>, * <a href="package-summary.html#Statelessness">stateless</a> * function for incorporating an additional element into a result * @param combiner an <a href="package-summary.html#Associativity">associative</a>, * <a href="package-summary.html#NonInterference">non-interfering</a>, * <a href="package-summary.html#Statelessness">stateless</a> * function for combining two values, which must be * compatible with the accumulator function * @return the result of the reduction * @see Stream#collect(Supplier, BiConsumer, BiConsumer) */ <R> R collect(Supplier<R> supplier, ObjDoubleConsumer<R> accumulator, BiConsumer<R, R> combiner); /** * Returns the sum of elements in this stream. * * Summation is a special case of a <a * href="package-summary.html#Reduction">reduction</a>. 
If * floating-point summation were exact, this method would be * equivalent to: * * <pre>{@code * return reduce(0, Double::sum); * }</pre> * * However, since floating-point summation is not exact, the above * code is not necessarily equivalent to the summation computation * done by this method. * * <p>If any stream element is a NaN or the sum is at any point a NaN * then the sum will be NaN. * * The value of a floating-point sum is a function both * of the input values as well as the order of addition * operations. The order of addition operations of this method is * intentionally not defined to allow for implementation * flexibility to improve the speed and accuracy of the computed * result. * * In particular, this method may be implemented using compensated * summation or other technique to reduce the error bound in the * numerical sum compared to a simple summation of {@code double} * values. * * <p>This is a <a href="package-summary.html#StreamOps">terminal * operation</a>. * * @apiNote Elements sorted by increasing absolute magnitude tend * to yield more accurate results. * * @return the sum of elements in this stream */ double sum(); /** * Returns an {@code OptionalDouble} describing the minimum element of this * stream, or an empty OptionalDouble if this stream is empty. The minimum * element will be {@code Double.NaN} if any stream element was NaN. Unlike * the numerical comparison operators, this method considers negative zero * to be strictly smaller than positive zero. This is a special case of a * <a href="package-summary.html#Reduction">reduction</a> and is * equivalent to: * <pre>{@code * return reduce(Double::min); * }</pre> * * <p>This is a <a href="package-summary.html#StreamOps">terminal * operation</a>. 
* * @return an {@code OptionalDouble} containing the minimum element of this * stream, or an empty optional if the stream is empty */ OptionalDouble min(); /** * Returns an {@code OptionalDouble} describing the maximum element of this * stream, or an empty OptionalDouble if this stream is empty. The maximum * element will be {@code Double.NaN} if any stream element was NaN. Unlike * the numerical comparison operators, this method considers negative zero * to be strictly smaller than positive zero. This is a * special case of a * <a href="package-summary.html#Reduction">reduction</a> and is * equivalent to: * <pre>{@code * return reduce(Double::max); * }</pre> * * <p>This is a <a href="package-summary.html#StreamOps">terminal * operation</a>. * * @return an {@code OptionalDouble} containing the maximum element of this * stream, or an empty optional if the stream is empty */ OptionalDouble max(); /** * Returns the count of elements in this stream. This is a special case of * a <a href="package-summary.html#Reduction">reduction</a> and is * equivalent to: * <pre>{@code * return mapToLong(e -> 1L).sum(); * }</pre> * * <p>This is a <a href="package-summary.html#StreamOps">terminal operation</a>. * * @return the count of elements in this stream */ long count(); /** * Returns an {@code OptionalDouble} describing the arithmetic * mean of elements of this stream, or an empty optional if this * stream is empty. * * If any recorded value is a NaN or the sum is at any point a NaN * then the average will be NaN. * * <p>The average returned can vary depending upon the order in * which values are recorded. * * This method may be implemented using compensated summation or * other technique to reduce the error bound in the {@link #sum * numerical sum} used to compute the average. * * <p>The average is a special case of a <a * href="package-summary.html#Reduction">reduction</a>. * * <p>This is a <a href="package-summary.html#StreamOps">terminal * operation</a>. 
* * @apiNote Elements sorted by increasing absolute magnitude tend * to yield more accurate results. * * @return an {@code OptionalDouble} containing the average element of this * stream, or an empty optional if the stream is empty */ OptionalDouble average(); /** * Returns a {@code DoubleSummaryStatistics} describing various summary data * about the elements of this stream. This is a special * case of a <a href="package-summary.html#Reduction">reduction</a>. * * <p>This is a <a href="package-summary.html#StreamOps">terminal * operation</a>. * * @return a {@code DoubleSummaryStatistics} describing various summary data * about the elements of this stream */ DoubleSummaryStatistics summaryStatistics(); /** * Returns whether any elements of this stream match the provided * predicate. May not evaluate the predicate on all elements if not * necessary for determining the result. If the stream is empty then * {@code false} is returned and the predicate is not evaluated. * * <p>This is a <a href="package-summary.html#StreamOps">short-circuiting * terminal operation</a>. * * @apiNote * This method evaluates the <em>existential quantification</em> of the * predicate over the elements of the stream (for some x P(x)). * * @param predicate a <a href="package-summary.html#NonInterference">non-interfering</a>, * <a href="package-summary.html#Statelessness">stateless</a> * predicate to apply to elements of this stream * @return {@code true} if any elements of the stream match the provided * predicate, otherwise {@code false} */ boolean anyMatch(DoublePredicate predicate); /** * Returns whether all elements of this stream match the provided predicate. * May not evaluate the predicate on all elements if not necessary for * determining the result. If the stream is empty then {@code true} is * returned and the predicate is not evaluated. * * <p>This is a <a href="package-summary.html#StreamOps">short-circuiting * terminal operation</a>. 
* * @apiNote * This method evaluates the <em>universal quantification</em> of the * predicate over the elements of the stream (for all x P(x)). If the * stream is empty, the quantification is said to be <em>vacuously * satisfied</em> and is always {@code true} (regardless of P(x)). * * @param predicate a <a href="package-summary.html#NonInterference">non-interfering</a>, * <a href="package-summary.html#Statelessness">stateless</a> * predicate to apply to elements of this stream * @return {@code true} if either all elements of the stream match the * provided predicate or the stream is empty, otherwise {@code false} */ boolean allMatch(DoublePredicate predicate); /** * Returns whether no elements of this stream match the provided predicate. * May not evaluate the predicate on all elements if not necessary for * determining the result. If the stream is empty then {@code true} is * returned and the predicate is not evaluated. * * <p>This is a <a href="package-summary.html#StreamOps">short-circuiting * terminal operation</a>. * * @apiNote * This method evaluates the <em>universal quantification</em> of the * negated predicate over the elements of the stream (for all x ~P(x)). If * the stream is empty, the quantification is said to be vacuously satisfied * and is always {@code true}, regardless of P(x). * * @param predicate a <a href="package-summary.html#NonInterference">non-interfering</a>, * <a href="package-summary.html#Statelessness">stateless</a> * predicate to apply to elements of this stream * @return {@code true} if either no elements of the stream match the * provided predicate or the stream is empty, otherwise {@code false} */ boolean noneMatch(DoublePredicate predicate); /** * Returns an {@link OptionalDouble} describing the first element of this * stream, or an empty {@code OptionalDouble} if the stream is empty. If * the stream has no encounter order, then any element may be returned. 
* * <p>This is a <a href="package-summary.html#StreamOps">short-circuiting * terminal operation</a>. * * @return an {@code OptionalDouble} describing the first element of this * stream, or an empty {@code OptionalDouble} if the stream is empty */ OptionalDouble findFirst(); /** * Returns an {@link OptionalDouble} describing some element of the stream, * or an empty {@code OptionalDouble} if the stream is empty. * * <p>This is a <a href="package-summary.html#StreamOps">short-circuiting * terminal operation</a>. * * <p>The behavior of this operation is explicitly nondeterministic; it is * free to select any element in the stream. This is to allow for maximal * performance in parallel operations; the cost is that multiple invocations * on the same source may not return the same result. (If a stable result * is desired, use {@link #findFirst()} instead.) * * @return an {@code OptionalDouble} describing some element of this stream, * or an empty {@code OptionalDouble} if the stream is empty * @see #findFirst() */ OptionalDouble findAny(); /** * Returns a {@code Stream} consisting of the elements of this stream, * boxed to {@code Double}. * * <p>This is an <a href="package-summary.html#StreamOps">intermediate * operation</a>. * * @return a {@code Stream} consistent of the elements of this stream, * each boxed to a {@code Double} */ Stream<Double> boxed(); @Override DoubleStream sequential(); @Override DoubleStream parallel(); @Override PrimitiveIterator.OfDouble iterator(); @Override Spliterator.OfDouble spliterator(); // Static factories /** * Returns a builder for a {@code DoubleStream}. * * @return a stream builder */ public static Builder builder() { return new Streams.DoubleStreamBuilderImpl(); } /** * Returns an empty sequential {@code DoubleStream}. 
* * @return an empty sequential stream */ public static DoubleStream empty() { return StreamSupport.doubleStream(Spliterators.emptyDoubleSpliterator(), false); } /** * Returns a sequential {@code DoubleStream} containing a single element. * * @param t the single element * @return a singleton sequential stream */ public static DoubleStream of(double t) { return StreamSupport.doubleStream(new Streams.DoubleStreamBuilderImpl(t), false); } /** * Returns a sequential ordered stream whose elements are the specified values. * * @param values the elements of the new stream * @return the new stream */ public static DoubleStream of(double... values) { return Arrays.stream(values); } /** * Returns an infinite sequential ordered {@code DoubleStream} produced by iterative * application of a function {@code f} to an initial element {@code seed}, * producing a {@code Stream} consisting of {@code seed}, {@code f(seed)}, * {@code f(f(seed))}, etc. * * <p>The first element (position {@code 0}) in the {@code DoubleStream} * will be the provided {@code seed}. For {@code n > 0}, the element at * position {@code n}, will be the result of applying the function {@code f} * to the element at position {@code n - 1}. 
* * @param seed the initial element * @param f a function to be applied to to the previous element to produce * a new element * @return a new sequential {@code DoubleStream} */ public static DoubleStream iterate(final double seed, final DoubleUnaryOperator f) { Objects.requireNonNull(f); final PrimitiveIterator.OfDouble iterator = new PrimitiveIterator.OfDouble() { double t = seed; @Override public boolean hasNext() { return true; } @Override public double nextDouble() { double v = t; t = f.applyAsDouble(t); return v; } }; return StreamSupport.doubleStream(Spliterators.spliteratorUnknownSize( iterator, Spliterator.ORDERED | Spliterator.IMMUTABLE | Spliterator.NONNULL), false); } /** * Returns an infinite sequential unordered stream where each element is * generated by the provided {@code DoubleSupplier}. This is suitable for * generating constant streams, streams of random elements, etc. * * @param s the {@code DoubleSupplier} for generated elements * @return a new infinite sequential unordered {@code DoubleStream} */ public static DoubleStream generate(DoubleSupplier s) { Objects.requireNonNull(s); return StreamSupport.doubleStream( new StreamSpliterators.InfiniteSupplyingSpliterator.OfDouble(Long.MAX_VALUE, s), false); } /** * Creates a lazily concatenated stream whose elements are all the * elements of the first stream followed by all the elements of the * second stream. The resulting stream is ordered if both * of the input streams are ordered, and parallel if either of the input * streams is parallel. When the resulting stream is closed, the close * handlers for both input streams are invoked. * * @implNote * Use caution when constructing streams from repeated concatenation. * Accessing an element of a deeply concatenated stream can result in deep * call chains, or even {@code StackOverflowException}. 
* * @param a the first stream * @param b the second stream * @return the concatenation of the two input streams */ public static DoubleStream concat(DoubleStream a, DoubleStream b) { Objects.requireNonNull(a); Objects.requireNonNull(b); Spliterator.OfDouble split = new Streams.ConcatSpliterator.OfDouble( a.spliterator(), b.spliterator()); DoubleStream stream = StreamSupport.doubleStream(split, a.isParallel() || b.isParallel()); return stream.onClose(Streams.composedClose(a, b)); } /** * A mutable builder for a {@code DoubleStream}. * * <p>A stream builder has a lifecycle, which starts in a building * phase, during which elements can be added, and then transitions to a built * phase, after which elements may not be added. The built phase * begins when the {@link #build()} method is called, which creates an * ordered stream whose elements are the elements that were added to the * stream builder, in the order they were added. * * @see DoubleStream#builder() * @since 1.8 */ public interface Builder extends DoubleConsumer { /** * Adds an element to the stream being built. * * @throws IllegalStateException if the builder has already transitioned * to the built state */ @Override void accept(double t); /** * Adds an element to the stream being built. * * @implSpec * The default implementation behaves as if: * <pre>{@code * accept(t) * return this; * }</pre> * * @param t the element to add * @return {@code this} builder * @throws IllegalStateException if the builder has already transitioned * to the built state */ default Builder add(double t) { accept(t); return this; } /** * Builds the stream, transitioning this builder to the built state. * An {@code IllegalStateException} is thrown if there are further * attempts to operate on the builder after it has entered the built * state. * * @return the built stream * @throws IllegalStateException if the builder has already transitioned * to the built state */ DoubleStream build(); } }
google/truth
36,924
extensions/proto/src/main/java/com/google/common/truth/extensions/proto/ProtoSubject.java
/* * Copyright (c) 2016 Google, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.truth.extensions.proto; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.collect.Lists.asList; import static com.google.common.truth.Fact.fact; import static com.google.common.truth.Fact.simpleFact; import static com.google.common.truth.extensions.proto.FieldScopeUtil.asList; import com.google.common.truth.FailureMetadata; import com.google.protobuf.Descriptors.FieldDescriptor; import com.google.protobuf.ExtensionRegistry; import com.google.protobuf.Message; import com.google.protobuf.ProtobufToStringOutput; import com.google.protobuf.TypeRegistry; import java.util.Arrays; import java.util.Objects; import org.jspecify.annotations.Nullable; /** * Truth subject for the full version of Protocol Buffers. * * <p>{@code ProtoTruth.assertThat(actual).isEqualTo(expected)} performs the same assertion as * {@code Truth.assertThat(actual).isEqualTo(expected)}, but with a better failure message. By * default, the assertions are strict with respect to repeated field order, missing fields, etc. * This behavior can be changed with the configuration methods on this subject, e.g. {@code * ProtoTruth.assertThat(actual).ignoringRepeatedFieldOrder().isEqualTo(expected)}. 
* * <p>By default, floating-point fields are compared using exact equality, which is <a * href="https://truth.dev/floating_point">probably not what you want</a> if the values are the * results of some arithmetic. To check for approximate equality, use {@link #usingDoubleTolerance}, * {@link #usingFloatTolerance}, and {@linkplain #usingDoubleToleranceForFields(double, int, int...) * their per-field equivalents}. * * <p>Equality tests, and other methods, may yield slightly different behavior for versions 2 and 3 * of Protocol Buffers. If testing protos of multiple versions, make sure you understand the * behaviors of default and unknown fields so you don't under or over test. */ public class ProtoSubject extends LiteProtoSubject { /* * Storing a FailureMetadata instance in a Subject subclass is generally a bad practice. For an * explanation of why it works out OK here, see LiteProtoSubject. */ private final FailureMetadata metadata; private final Message actual; private final FluentEqualityConfig config; protected ProtoSubject(FailureMetadata failureMetadata, @Nullable Message message) { this(failureMetadata, FluentEqualityConfig.defaultInstance(), message); } ProtoSubject( FailureMetadata failureMetadata, FluentEqualityConfig config, @Nullable Message message) { super(failureMetadata, message); this.metadata = failureMetadata; this.actual = message; this.config = config; } ProtoFluentAssertionImpl usingConfig(FluentEqualityConfig newConfig) { return new ProtoFluentAssertionImpl(new ProtoSubject(metadata, newConfig, actual)); } /** * Specifies that the 'has' bit of individual fields should be ignored when comparing for * equality. * * <p>For version 2 Protocol Buffers, this setting determines whether two protos with the same * value for a field compare equal if one explicitly sets the value, and the other merely * implicitly uses the schema-defined default. This setting also determines whether unknown fields * should be considered in the comparison. 
By {@code ignoringFieldAbsence()}, unknown fields are * ignored, and value-equal fields as specified above are considered equal. * * <p>For version 3 Protocol Buffers, this setting does not affect primitive fields, because their * default value is indistinguishable from unset. */ public ProtoFluentAssertion ignoringFieldAbsence() { return usingConfig(config.ignoringFieldAbsence()); } /** * Specifies that the 'has' bit of these explicitly specified top-level field numbers should be * ignored when comparing for equality. Sub-fields must be specified explicitly (via {@link * FieldDescriptor}) if they are to be ignored as well. * * <p>Use {@link #ignoringFieldAbsence()} instead to ignore the 'has' bit for all fields. * * @see #ignoringFieldAbsence() for details */ public ProtoFluentAssertion ignoringFieldAbsenceOfFields(int firstFieldNumber, int... rest) { return usingConfig(config.ignoringFieldAbsenceOfFields(asList(firstFieldNumber, rest))); } /** * Specifies that the 'has' bit of these explicitly specified top-level field numbers should be * ignored when comparing for equality. Sub-fields must be specified explicitly (via {@link * FieldDescriptor}) if they are to be ignored as well. * * <p>Use {@link #ignoringFieldAbsence()} instead to ignore the 'has' bit for all fields. * * @see #ignoringFieldAbsence() for details */ public ProtoFluentAssertion ignoringFieldAbsenceOfFields(Iterable<Integer> fieldNumbers) { return usingConfig(config.ignoringFieldAbsenceOfFields(fieldNumbers)); } /** * Specifies that the 'has' bit of these explicitly specified field descriptors should be ignored * when comparing for equality. Sub-fields must be specified explicitly if they are to be ignored * as well. * * <p>Use {@link #ignoringFieldAbsence()} instead to ignore the 'has' bit for all fields. * * @see #ignoringFieldAbsence() for details */ public ProtoFluentAssertion ignoringFieldAbsenceOfFieldDescriptors( FieldDescriptor firstFieldDescriptor, FieldDescriptor... 
rest) { return usingConfig( config.ignoringFieldAbsenceOfFieldDescriptors(asList(firstFieldDescriptor, rest))); } /** * Specifies that the 'has' bit of these explicitly specified field descriptors should be ignored * when comparing for equality. Sub-fields must be specified explicitly if they are to be ignored * as well. * * <p>Use {@link #ignoringFieldAbsence()} instead to ignore the 'has' bit for all fields. * * @see #ignoringFieldAbsence() for details */ public ProtoFluentAssertion ignoringFieldAbsenceOfFieldDescriptors( Iterable<FieldDescriptor> fieldDescriptors) { return usingConfig(config.ignoringFieldAbsenceOfFieldDescriptors(fieldDescriptors)); } /** * Specifies that the ordering of repeated fields, at all levels, should be ignored when comparing * for equality. * * <p>This setting applies to all repeated fields recursively, but it does not ignore structure. * For example, with {@link #ignoringRepeatedFieldOrder()}, a repeated {@code int32} field {@code * bar}, set inside a repeated message field {@code foo}, the following protos will all compare * equal: * * <pre>{@code * message1: { * foo: { * bar: 1 * bar: 2 * } * foo: { * bar: 3 * bar: 4 * } * } * * message2: { * foo: { * bar: 2 * bar: 1 * } * foo: { * bar: 4 * bar: 3 * } * } * * message3: { * foo: { * bar: 4 * bar: 3 * } * foo: { * bar: 2 * bar: 1 * } * } * }</pre> * * <p>However, the following message will compare equal to none of these: * * <pre>{@code * message4: { * foo: { * bar: 1 * bar: 3 * } * foo: { * bar: 2 * bar: 4 * } * } * }</pre> * * <p>This setting does not apply to map fields, for which field order is always ignored. The * serialization order of map fields is undefined, and it may change from runtime to runtime. */ public ProtoFluentAssertion ignoringRepeatedFieldOrder() { return usingConfig(config.ignoringRepeatedFieldOrder()); } /** * Specifies that the ordering of repeated fields for these explicitly specified top-level field * numbers should be ignored when comparing for equality. 
Sub-fields must be specified explicitly * (via {@link FieldDescriptor}) if their orders are to be ignored as well. * * <p>Use {@link #ignoringRepeatedFieldOrder()} instead to ignore order for all fields. * * @see #ignoringRepeatedFieldOrder() for details. */ public ProtoFluentAssertion ignoringRepeatedFieldOrderOfFields( int firstFieldNumber, int... rest) { return usingConfig(config.ignoringRepeatedFieldOrderOfFields(asList(firstFieldNumber, rest))); } /** * Specifies that the ordering of repeated fields for these explicitly specified top-level field * numbers should be ignored when comparing for equality. Sub-fields must be specified explicitly * (via {@link FieldDescriptor}) if their orders are to be ignored as well. * * <p>Use {@link #ignoringRepeatedFieldOrder()} instead to ignore order for all fields. * * @see #ignoringRepeatedFieldOrder() for details. */ public ProtoFluentAssertion ignoringRepeatedFieldOrderOfFields(Iterable<Integer> fieldNumbers) { return usingConfig(config.ignoringRepeatedFieldOrderOfFields(fieldNumbers)); } /** * Specifies that the ordering of repeated fields for these explicitly specified field descriptors * should be ignored when comparing for equality. Sub-fields must be specified explicitly if their * orders are to be ignored as well. * * <p>Use {@link #ignoringRepeatedFieldOrder()} instead to ignore order for all fields. * * @see #ignoringRepeatedFieldOrder() for details. */ public ProtoFluentAssertion ignoringRepeatedFieldOrderOfFieldDescriptors( FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest) { return usingConfig( config.ignoringRepeatedFieldOrderOfFieldDescriptors(asList(firstFieldDescriptor, rest))); } /** * Specifies that the ordering of repeated fields for these explicitly specified field descriptors * should be ignored when comparing for equality. Sub-fields must be specified explicitly if their * orders are to be ignored as well. 
* * <p>Use {@link #ignoringRepeatedFieldOrder()} instead to ignore order for all fields. * * @see #ignoringRepeatedFieldOrder() for details. */ public ProtoFluentAssertion ignoringRepeatedFieldOrderOfFieldDescriptors( Iterable<FieldDescriptor> fieldDescriptors) { return usingConfig(config.ignoringRepeatedFieldOrderOfFieldDescriptors(fieldDescriptors)); } /** * Specifies that, for all repeated and map fields, any elements in the 'actual' proto which are * not found in the 'expected' proto are ignored, with the exception of fields in the expected * proto which are empty. To ignore empty repeated fields as well, use {@link * #comparingExpectedFieldsOnly}. * * <p>This rule is applied independently from {@link #ignoringRepeatedFieldOrder}. If ignoring * repeated field order AND extra repeated field elements, all that is tested is that the expected * elements comprise a subset of the actual elements. If not ignoring repeated field order, but * still ignoring extra repeated field elements, the actual elements must contain a subsequence * that matches the expected elements for the test to pass. (The subsequence rule does not apply * to Map fields, which are always compared by key.) */ public ProtoFluentAssertion ignoringExtraRepeatedFieldElements() { return usingConfig(config.ignoringExtraRepeatedFieldElements()); } /** * Specifies that extra repeated field elements for these explicitly specified top-level field * numbers should be ignored. Sub-fields must be specified explicitly (via {@link * FieldDescriptor}) if their extra elements are to be ignored as well. * * <p>Use {@link #ignoringExtraRepeatedFieldElements()} instead to ignore these for all fields. * * @see #ignoringExtraRepeatedFieldElements() for details. */ public ProtoFluentAssertion ignoringExtraRepeatedFieldElementsOfFields( int firstFieldNumber, int... 
rest) { return usingConfig( config.ignoringExtraRepeatedFieldElementsOfFields(asList(firstFieldNumber, rest))); } /** * Specifies that extra repeated field elements for these explicitly specified top-level field * numbers should be ignored. Sub-fields must be specified explicitly (via {@link * FieldDescriptor}) if their extra elements are to be ignored as well. * * <p>Use {@link #ignoringExtraRepeatedFieldElements()} instead to ignore these for all fields. * * @see #ignoringExtraRepeatedFieldElements() for details. */ public ProtoFluentAssertion ignoringExtraRepeatedFieldElementsOfFields( Iterable<Integer> fieldNumbers) { return usingConfig(config.ignoringExtraRepeatedFieldElementsOfFields(fieldNumbers)); } /** * Specifies that extra repeated field elements for these explicitly specified field descriptors * should be ignored. Sub-fields must be specified explicitly if their extra elements are to be * ignored as well. * * <p>Use {@link #ignoringExtraRepeatedFieldElements()} instead to ignore these for all fields. * * @see #ignoringExtraRepeatedFieldElements() for details. */ public ProtoFluentAssertion ignoringExtraRepeatedFieldElementsOfFieldDescriptors( FieldDescriptor first, FieldDescriptor... rest) { return usingConfig( config.ignoringExtraRepeatedFieldElementsOfFieldDescriptors(asList(first, rest))); } /** * Specifies that extra repeated field elements for these explicitly specified field descriptors * should be ignored. Sub-fields must be specified explicitly if their extra elements are to be * ignored as well. * * <p>Use {@link #ignoringExtraRepeatedFieldElements()} instead to ignore these for all fields. * * @see #ignoringExtraRepeatedFieldElements() for details. 
*/ public ProtoFluentAssertion ignoringExtraRepeatedFieldElementsOfFieldDescriptors( Iterable<FieldDescriptor> fieldDescriptors) { return usingConfig( config.ignoringExtraRepeatedFieldElementsOfFieldDescriptors(fieldDescriptors)); } /** * Compares double fields as equal if they are both finite and their absolute difference is less * than or equal to {@code tolerance}. * * @param tolerance A finite, non-negative tolerance. */ public ProtoFluentAssertion usingDoubleTolerance(double tolerance) { return usingConfig(config.usingDoubleTolerance(tolerance)); } /** * Compares double fields with these explicitly specified top-level field numbers using the * provided absolute tolerance. * * @param tolerance A finite, non-negative tolerance. */ public ProtoFluentAssertion usingDoubleToleranceForFields( double tolerance, int firstFieldNumber, int... rest) { return usingConfig( config.usingDoubleToleranceForFields(tolerance, asList(firstFieldNumber, rest))); } /** * Compares double fields with these explicitly specified top-level field numbers using the * provided absolute tolerance. * * @param tolerance A finite, non-negative tolerance. */ public ProtoFluentAssertion usingDoubleToleranceForFields( double tolerance, Iterable<Integer> fieldNumbers) { return usingConfig(config.usingDoubleToleranceForFields(tolerance, fieldNumbers)); } /** * Compares double fields with these explicitly specified fields using the provided absolute * tolerance. * * @param tolerance A finite, non-negative tolerance. */ public ProtoFluentAssertion usingDoubleToleranceForFieldDescriptors( double tolerance, FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest) { return usingConfig( config.usingDoubleToleranceForFieldDescriptors( tolerance, asList(firstFieldDescriptor, rest))); } /** * Compares double fields with these explicitly specified fields using the provided absolute * tolerance. * * @param tolerance A finite, non-negative tolerance. 
*/ public ProtoFluentAssertion usingDoubleToleranceForFieldDescriptors( double tolerance, Iterable<FieldDescriptor> fieldDescriptors) { return usingConfig(config.usingDoubleToleranceForFieldDescriptors(tolerance, fieldDescriptors)); } /** * Compares float fields as equal if they are both finite and their absolute difference is less * than or equal to {@code tolerance}. * * @param tolerance A finite, non-negative tolerance. */ public ProtoFluentAssertion usingFloatTolerance(float tolerance) { return usingConfig(config.usingFloatTolerance(tolerance)); } /** * Compares float fields with these explicitly specified top-level field numbers using the * provided absolute tolerance. * * @param tolerance A finite, non-negative tolerance. */ public ProtoFluentAssertion usingFloatToleranceForFields( float tolerance, int firstFieldNumber, int... rest) { return usingConfig( config.usingFloatToleranceForFields(tolerance, asList(firstFieldNumber, rest))); } /** * Compares float fields with these explicitly specified top-level field numbers using the * provided absolute tolerance. * * @param tolerance A finite, non-negative tolerance. */ public ProtoFluentAssertion usingFloatToleranceForFields( float tolerance, Iterable<Integer> fieldNumbers) { return usingConfig(config.usingFloatToleranceForFields(tolerance, fieldNumbers)); } /** * Compares float fields with these explicitly specified fields using the provided absolute * tolerance. * * @param tolerance A finite, non-negative tolerance. */ public ProtoFluentAssertion usingFloatToleranceForFieldDescriptors( float tolerance, FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest) { return usingConfig( config.usingFloatToleranceForFieldDescriptors( tolerance, asList(firstFieldDescriptor, rest))); } /** * Compares float fields with these explicitly specified top-level field numbers using the * provided absolute tolerance. * * @param tolerance A finite, non-negative tolerance. 
*/ public ProtoFluentAssertion usingFloatToleranceForFieldDescriptors( float tolerance, Iterable<FieldDescriptor> fieldDescriptors) { return usingConfig(config.usingFloatToleranceForFieldDescriptors(tolerance, fieldDescriptors)); } /** * Limits the comparison of Protocol buffers to the fields set in the expected proto(s). When * multiple protos are specified, the comparison is limited to the union of set fields in all the * expected protos. * * <p>The "expected proto(s)" are those passed to the void method at the end of the {@code * ProtoFluentAssertion} call-chain: For example, {@link #isEqualTo(Message)}, or {@link * #isNotEqualTo(Message)}. * * <p>Fields not set in the expected proto(s) are ignored. In particular, proto3 fields which have * their default values are ignored, as these are indistinguishable from unset fields. If you want * to assert that a proto3 message has certain fields with default values, you cannot use this * method. */ public ProtoFluentAssertion comparingExpectedFieldsOnly() { return usingConfig(config.comparingExpectedFieldsOnly()); } /** * Limits the comparison of Protocol buffers to the defined {@link FieldScope}. * * <p>This method is additive and has well-defined ordering semantics. If the invoking {@link * ProtoFluentAssertion} is already scoped to a {@link FieldScope} {@code X}, and this method is * invoked with {@link FieldScope} {@code Y}, the resultant {@link ProtoFluentAssertion} is * constrained to the intersection of {@link FieldScope}s {@code X} and {@code Y}. * * <p>By default, {@link ProtoFluentAssertion} is constrained to {@link FieldScopes#all()}, that * is, no fields are excluded from comparison. */ public ProtoFluentAssertion withPartialScope(FieldScope fieldScope) { return usingConfig(config.withPartialScope(checkNotNull(fieldScope, "fieldScope"))); } /** * Excludes the top-level message fields with the given tag numbers from the comparison. 
* * <p>This method adds on any previous {@link FieldScope} related settings, overriding previous * changes to ensure the specified fields are ignored recursively. All sub-fields of these field * numbers are ignored, and all sub-messages of type {@code M} will also have these field numbers * ignored. * * <p>If an invalid field number is supplied, the terminal comparison operation will throw a * runtime exception. */ public ProtoFluentAssertion ignoringFields(int firstFieldNumber, int... rest) { return ignoringFields(asList(firstFieldNumber, rest)); } /** * Excludes the top-level message fields with the given tag numbers from the comparison. * * <p>This method adds on any previous {@link FieldScope} related settings, overriding previous * changes to ensure the specified fields are ignored recursively. All sub-fields of these field * numbers are ignored, and all sub-messages of type {@code M} will also have these field numbers * ignored. * * <p>If an invalid field number is supplied, the terminal comparison operation will throw a * runtime exception. */ public ProtoFluentAssertion ignoringFields(Iterable<Integer> fieldNumbers) { return usingConfig(config.ignoringFields(fieldNumbers)); } /** * Excludes all message fields matching the given {@link FieldDescriptor}s from the comparison. * * <p>This method adds on any previous {@link FieldScope} related settings, overriding previous * changes to ensure the specified fields are ignored recursively. All sub-fields of these field * descriptors are ignored, no matter where they occur in the tree. * * <p>If a field descriptor which does not, or cannot occur in the proto structure is supplied, it * is silently ignored. */ public ProtoFluentAssertion ignoringFieldDescriptors( FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest) { return ignoringFieldDescriptors(asList(firstFieldDescriptor, rest)); } /** * Excludes all message fields matching the given {@link FieldDescriptor}s from the comparison. 
* * <p>This method adds on any previous {@link FieldScope} related settings, overriding previous * changes to ensure the specified fields are ignored recursively. All sub-fields of these field * descriptors are ignored, no matter where they occur in the tree. * * <p>If a field descriptor which does not, or cannot occur in the proto structure is supplied, it * is silently ignored. */ public ProtoFluentAssertion ignoringFieldDescriptors(Iterable<FieldDescriptor> fieldDescriptors) { return usingConfig(config.ignoringFieldDescriptors(fieldDescriptors)); } /** * Excludes all specific field paths under the argument {@link FieldScope} from the comparison. * * <p>This method is additive and has well-defined ordering semantics. If the invoking {@link * ProtoFluentAssertion} is already scoped to a {@link FieldScope} {@code X}, and this method is * invoked with {@link FieldScope} {@code Y}, the resultant {@link ProtoFluentAssertion} is * constrained to the subtraction of {@code X - Y}. * * <p>By default, {@link ProtoFluentAssertion} is constrained to {@link FieldScopes#all()}, that * is, no fields are excluded from comparison. */ public ProtoFluentAssertion ignoringFieldScope(FieldScope fieldScope) { return usingConfig(config.ignoringFieldScope(checkNotNull(fieldScope, "fieldScope"))); } /** * If set, in the event of a comparison failure, the error message printed will list only those * specific fields that did not match between the actual and expected values. Useful for very * large protocol buffers. * * <p>This a purely cosmetic setting, and it has no effect on the behavior of the test. */ public ProtoFluentAssertion reportingMismatchesOnly() { return usingConfig(config.reportingMismatchesOnly()); } /** * Specifies the {@link TypeRegistry} and {@link ExtensionRegistry} to use for {@link * com.google.protobuf.Any Any} messages. 
* * <p>To compare the value of an {@code Any} message, ProtoTruth looks in the given type registry * for a descriptor for the message's type URL: * * <ul> * <li>If ProtoTruth finds a descriptor, it unpacks the value and compares it against the * expected value, respecting any configuration methods used for the assertion. * <li>If ProtoTruth does not find a descriptor (or if the value can't be deserialized with the * descriptor), it compares the raw, serialized bytes of the expected and actual values. * </ul> * * <p>When ProtoTruth unpacks a value, it is parsing a serialized proto. That proto may contain * extensions. To look up those extensions, ProtoTruth uses the provided {@link * ExtensionRegistry}. * * @since 1.1 */ public ProtoFluentAssertion unpackingAnyUsing( TypeRegistry typeRegistry, ExtensionRegistry extensionRegistry) { return usingConfig(config.unpackingAnyUsing(typeRegistry, extensionRegistry)); } private static boolean sameClassMessagesWithDifferentDescriptors( @Nullable Message actual, @Nullable Object expected) { if (actual == null || !(expected instanceof Message) || actual.getClass() != expected.getClass()) { return false; } return actual.getDescriptorForType() != ((Message) expected).getDescriptorForType(); } private static boolean notMessagesWithSameDescriptor( @Nullable Message actual, @Nullable Object expected) { if (actual != null && expected instanceof Message) { return actual.getDescriptorForType() != ((Message) expected).getDescriptorForType(); } return true; } @Override public void isEqualTo(@Nullable Object expected) { if (sameClassMessagesWithDifferentDescriptors(actual, expected)) { // This can happen with DynamicMessages, and it's very confusing if they both have the // same string. 
failWithoutActual( simpleFact("Not true that messages compare equal; they have different descriptors."), fact("expected", expected), fact("with descriptor", ((Message) expected).getDescriptorForType()), fact("but was", actual), fact("with descriptor", actual.getDescriptorForType())); } else if (notMessagesWithSameDescriptor(actual, expected)) { ProtobufToStringOutput.callWithTextFormat(() -> super.isEqualTo(expected)); } else { DiffResult diffResult = makeDifferencer((Message) expected).diffMessages(actual, (Message) expected); if (!diffResult.isMatched()) { failWithoutActual( simpleFact( "Not true that messages compare equal.\n" + diffResult.printToString(config.reportMismatchesOnly()))); } } } @Override public void isNotEqualTo(@Nullable Object expected) { if (notMessagesWithSameDescriptor(actual, expected)) { super.isNotEqualTo(expected); } else { DiffResult diffResult = makeDifferencer((Message) expected).diffMessages(actual, (Message) expected); if (diffResult.isMatched()) { failWithoutActual( simpleFact( "Not true that messages compare not equal.\n" + diffResult.printToString(config.reportMismatchesOnly()))); } } } @Override public void isEqualToDefaultInstance() { // Some tests assert things about the string representation of the proto, so we make sure that // stable text format is used here. 
ProtobufToStringOutput.callWithTextFormat(() -> super.isEqualToDefaultInstance()); } @Override public void hasAllRequiredFields() { if (!actual.isInitialized()) { failWithoutActual( simpleFact("expected to have all required fields set"), fact("but was missing", actual.findInitializationErrors()), fact("proto was", actualCustomStringRepresentationForProtoPackageMembersToCall())); } } private ProtoTruthMessageDifferencer makeDifferencer(Message expected) { return config .withExpectedMessages(Arrays.asList(expected)) .toMessageDifferencer(actual.getDescriptorForType()); } static final class ProtoFluentAssertionImpl implements ProtoFluentAssertion { private final ProtoSubject protoSubject; ProtoFluentAssertionImpl(ProtoSubject protoSubject) { this.protoSubject = protoSubject; } @Override public ProtoFluentAssertion ignoringFieldAbsence() { return protoSubject.ignoringFieldAbsence(); } @Override public ProtoFluentAssertion ignoringFieldAbsenceOfFields(int firstFieldNumber, int... rest) { return protoSubject.ignoringFieldAbsenceOfFields(firstFieldNumber, rest); } @Override public ProtoFluentAssertion ignoringFieldAbsenceOfFields(Iterable<Integer> fieldNumbers) { return protoSubject.ignoringFieldAbsenceOfFields(fieldNumbers); } @Override public ProtoFluentAssertion ignoringFieldAbsenceOfFieldDescriptors( FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest) { return protoSubject.ignoringFieldAbsenceOfFieldDescriptors(firstFieldDescriptor, rest); } @Override public ProtoFluentAssertion ignoringFieldAbsenceOfFieldDescriptors( Iterable<FieldDescriptor> fieldDescriptors) { return protoSubject.ignoringFieldAbsenceOfFieldDescriptors(fieldDescriptors); } @Override public ProtoFluentAssertion ignoringRepeatedFieldOrder() { return protoSubject.ignoringRepeatedFieldOrder(); } @Override public ProtoFluentAssertion ignoringRepeatedFieldOrderOfFields( int firstFieldNumber, int... 
rest) { return protoSubject.ignoringRepeatedFieldOrderOfFields(firstFieldNumber, rest); } @Override public ProtoFluentAssertion ignoringRepeatedFieldOrderOfFields(Iterable<Integer> fieldNumbers) { return protoSubject.ignoringRepeatedFieldOrderOfFields(fieldNumbers); } @Override public ProtoFluentAssertion ignoringRepeatedFieldOrderOfFieldDescriptors( FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest) { return protoSubject.ignoringRepeatedFieldOrderOfFieldDescriptors(firstFieldDescriptor, rest); } @Override public ProtoFluentAssertion ignoringRepeatedFieldOrderOfFieldDescriptors( Iterable<FieldDescriptor> fieldDescriptors) { return protoSubject.ignoringRepeatedFieldOrderOfFieldDescriptors(fieldDescriptors); } @Override public ProtoFluentAssertion ignoringExtraRepeatedFieldElements() { return protoSubject.ignoringExtraRepeatedFieldElements(); } @Override public ProtoFluentAssertion ignoringExtraRepeatedFieldElementsOfFields( int firstFieldNumber, int... rest) { return protoSubject.ignoringExtraRepeatedFieldElementsOfFields(firstFieldNumber, rest); } @Override public ProtoFluentAssertion ignoringExtraRepeatedFieldElementsOfFields( Iterable<Integer> fieldNumbers) { return protoSubject.ignoringExtraRepeatedFieldElementsOfFields(fieldNumbers); } @Override public ProtoFluentAssertion ignoringExtraRepeatedFieldElementsOfFieldDescriptors( FieldDescriptor firstFieldDescriptor, FieldDescriptor... 
rest) { return protoSubject.ignoringExtraRepeatedFieldElementsOfFieldDescriptors( firstFieldDescriptor, rest); } @Override public ProtoFluentAssertion ignoringExtraRepeatedFieldElementsOfFieldDescriptors( Iterable<FieldDescriptor> fieldDescriptors) { return protoSubject.ignoringExtraRepeatedFieldElementsOfFieldDescriptors(fieldDescriptors); } @Override public ProtoFluentAssertion usingDoubleTolerance(double tolerance) { return protoSubject.usingDoubleTolerance(tolerance); } @Override public ProtoFluentAssertion usingDoubleToleranceForFields( double tolerance, int firstFieldNumber, int... rest) { return protoSubject.usingDoubleToleranceForFields(tolerance, firstFieldNumber, rest); } @Override public ProtoFluentAssertion usingDoubleToleranceForFields( double tolerance, Iterable<Integer> fieldNumbers) { return protoSubject.usingDoubleToleranceForFields(tolerance, fieldNumbers); } @Override public ProtoFluentAssertion usingDoubleToleranceForFieldDescriptors( double tolerance, FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest) { return protoSubject.usingDoubleToleranceForFieldDescriptors( tolerance, firstFieldDescriptor, rest); } @Override public ProtoFluentAssertion usingDoubleToleranceForFieldDescriptors( double tolerance, Iterable<FieldDescriptor> fieldDescriptors) { return protoSubject.usingDoubleToleranceForFieldDescriptors(tolerance, fieldDescriptors); } @Override public ProtoFluentAssertion usingFloatTolerance(float tolerance) { return protoSubject.usingFloatTolerance(tolerance); } @Override public ProtoFluentAssertion usingFloatToleranceForFields( float tolerance, int firstFieldNumber, int... 
rest) { return protoSubject.usingFloatToleranceForFields(tolerance, firstFieldNumber, rest); } @Override public ProtoFluentAssertion usingFloatToleranceForFields( float tolerance, Iterable<Integer> fieldNumbers) { return protoSubject.usingFloatToleranceForFields(tolerance, fieldNumbers); } @Override public ProtoFluentAssertion usingFloatToleranceForFieldDescriptors( float tolerance, FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest) { return protoSubject.usingFloatToleranceForFieldDescriptors( tolerance, firstFieldDescriptor, rest); } @Override public ProtoFluentAssertion usingFloatToleranceForFieldDescriptors( float tolerance, Iterable<FieldDescriptor> fieldDescriptors) { return protoSubject.usingFloatToleranceForFieldDescriptors(tolerance, fieldDescriptors); } @Override public ProtoFluentAssertion comparingExpectedFieldsOnly() { return protoSubject.comparingExpectedFieldsOnly(); } @Override public ProtoFluentAssertion withPartialScope(FieldScope fieldScope) { return protoSubject.withPartialScope(fieldScope); } @Override public ProtoFluentAssertion ignoringFields(int firstFieldNumber, int... rest) { return protoSubject.ignoringFields(firstFieldNumber, rest); } @Override public ProtoFluentAssertion ignoringFields(Iterable<Integer> fieldNumbers) { return protoSubject.ignoringFields(fieldNumbers); } @Override public ProtoFluentAssertion ignoringFieldDescriptors( FieldDescriptor firstFieldDescriptor, FieldDescriptor... 
rest) { return protoSubject.ignoringFieldDescriptors(firstFieldDescriptor, rest); } @Override public ProtoFluentAssertion ignoringFieldDescriptors( Iterable<FieldDescriptor> fieldDescriptors) { return protoSubject.ignoringFieldDescriptors(fieldDescriptors); } @Override public ProtoFluentAssertion ignoringFieldScope(FieldScope fieldScope) { return protoSubject.ignoringFieldScope(fieldScope); } @Override public ProtoFluentAssertion reportingMismatchesOnly() { return protoSubject.reportingMismatchesOnly(); } @Override public ProtoFluentAssertion unpackingAnyUsing( TypeRegistry typeRegistry, ExtensionRegistry extensionRegistry) { return protoSubject.unpackingAnyUsing(typeRegistry, extensionRegistry); } @Override public void isEqualTo(@Nullable Message expected) { protoSubject.isEqualTo(expected); } @Override public void isNotEqualTo(@Nullable Message expected) { protoSubject.isNotEqualTo(expected); } /** * Same as {@link #isEqualTo(Message)}, except it returns true on success and false on failure * without throwing any exceptions. */ boolean testIsEqualTo(@Nullable Message expected) { if (notMessagesWithSameDescriptor(protoSubject.actual, expected)) { return Objects.equals(protoSubject.actual, expected); } else { return protoSubject .makeDifferencer(expected) .diffMessages(protoSubject.actual, expected) .isMatched(); } } } }
googleapis/google-cloud-java
36,628
java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListIndexesResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1beta1/index_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1beta1; /** * * * <pre> * Response message for * [IndexService.ListIndexes][google.cloud.aiplatform.v1beta1.IndexService.ListIndexes]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.ListIndexesResponse} */ public final class ListIndexesResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.ListIndexesResponse) ListIndexesResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListIndexesResponse.newBuilder() to construct. 
private ListIndexesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListIndexesResponse() { indexes_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListIndexesResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.IndexServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ListIndexesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.IndexServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ListIndexesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.ListIndexesResponse.class, com.google.cloud.aiplatform.v1beta1.ListIndexesResponse.Builder.class); } public static final int INDEXES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.aiplatform.v1beta1.Index> indexes_; /** * * * <pre> * List of indexes in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Index indexes = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.aiplatform.v1beta1.Index> getIndexesList() { return indexes_; } /** * * * <pre> * List of indexes in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Index indexes = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.aiplatform.v1beta1.IndexOrBuilder> getIndexesOrBuilderList() { return indexes_; } /** * * * <pre> * List of indexes in the requested page. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Index indexes = 1;</code> */ @java.lang.Override public int getIndexesCount() { return indexes_.size(); } /** * * * <pre> * List of indexes in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Index indexes = 1;</code> */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.Index getIndexes(int index) { return indexes_.get(index); } /** * * * <pre> * List of indexes in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Index indexes = 1;</code> */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.IndexOrBuilder getIndexesOrBuilder(int index) { return indexes_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to retrieve next page of results. * Pass to * [ListIndexesRequest.page_token][google.cloud.aiplatform.v1beta1.ListIndexesRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token to retrieve next page of results. * Pass to * [ListIndexesRequest.page_token][google.cloud.aiplatform.v1beta1.ListIndexesRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < indexes_.size(); i++) { output.writeMessage(1, indexes_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < indexes_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, indexes_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.ListIndexesResponse)) { return super.equals(obj); } com.google.cloud.aiplatform.v1beta1.ListIndexesResponse other = (com.google.cloud.aiplatform.v1beta1.ListIndexesResponse) obj; if (!getIndexesList().equals(other.getIndexesList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getIndexesCount() > 0) { hash = (37 * hash) + INDEXES_FIELD_NUMBER; hash = (53 * hash) + getIndexesList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1beta1.ListIndexesResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.ListIndexesResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ListIndexesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.ListIndexesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ListIndexesResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.ListIndexesResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); 
} public static com.google.cloud.aiplatform.v1beta1.ListIndexesResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.ListIndexesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ListIndexesResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.ListIndexesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ListIndexesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.ListIndexesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.aiplatform.v1beta1.ListIndexesResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public 
Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for * [IndexService.ListIndexes][google.cloud.aiplatform.v1beta1.IndexService.ListIndexes]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.ListIndexesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.ListIndexesResponse) com.google.cloud.aiplatform.v1beta1.ListIndexesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.IndexServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ListIndexesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.IndexServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ListIndexesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.ListIndexesResponse.class, com.google.cloud.aiplatform.v1beta1.ListIndexesResponse.Builder.class); } // Construct using com.google.cloud.aiplatform.v1beta1.ListIndexesResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (indexesBuilder_ == null) { indexes_ = java.util.Collections.emptyList(); } else { indexes_ = null; indexesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor 
getDescriptorForType() { return com.google.cloud.aiplatform.v1beta1.IndexServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ListIndexesResponse_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ListIndexesResponse getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1beta1.ListIndexesResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ListIndexesResponse build() { com.google.cloud.aiplatform.v1beta1.ListIndexesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ListIndexesResponse buildPartial() { com.google.cloud.aiplatform.v1beta1.ListIndexesResponse result = new com.google.cloud.aiplatform.v1beta1.ListIndexesResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.aiplatform.v1beta1.ListIndexesResponse result) { if (indexesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { indexes_ = java.util.Collections.unmodifiableList(indexes_); bitField0_ = (bitField0_ & ~0x00000001); } result.indexes_ = indexes_; } else { result.indexes_ = indexesBuilder_.build(); } } private void buildPartial0(com.google.cloud.aiplatform.v1beta1.ListIndexesResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder 
clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1beta1.ListIndexesResponse) { return mergeFrom((com.google.cloud.aiplatform.v1beta1.ListIndexesResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.ListIndexesResponse other) { if (other == com.google.cloud.aiplatform.v1beta1.ListIndexesResponse.getDefaultInstance()) return this; if (indexesBuilder_ == null) { if (!other.indexes_.isEmpty()) { if (indexes_.isEmpty()) { indexes_ = other.indexes_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureIndexesIsMutable(); indexes_.addAll(other.indexes_); } onChanged(); } } else { if (!other.indexes_.isEmpty()) { if (indexesBuilder_.isEmpty()) { indexesBuilder_.dispose(); indexesBuilder_ = null; indexes_ = other.indexes_; bitField0_ = (bitField0_ & ~0x00000001); indexesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getIndexesFieldBuilder() : null; } else { indexesBuilder_.addAllMessages(other.indexes_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.aiplatform.v1beta1.Index m = input.readMessage( com.google.cloud.aiplatform.v1beta1.Index.parser(), extensionRegistry); if (indexesBuilder_ == null) { ensureIndexesIsMutable(); indexes_.add(m); } else { indexesBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.aiplatform.v1beta1.Index> indexes_ = java.util.Collections.emptyList(); private void ensureIndexesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { indexes_ = new java.util.ArrayList<com.google.cloud.aiplatform.v1beta1.Index>(indexes_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.Index, com.google.cloud.aiplatform.v1beta1.Index.Builder, com.google.cloud.aiplatform.v1beta1.IndexOrBuilder> 
indexesBuilder_; /** * * * <pre> * List of indexes in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Index indexes = 1;</code> */ public java.util.List<com.google.cloud.aiplatform.v1beta1.Index> getIndexesList() { if (indexesBuilder_ == null) { return java.util.Collections.unmodifiableList(indexes_); } else { return indexesBuilder_.getMessageList(); } } /** * * * <pre> * List of indexes in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Index indexes = 1;</code> */ public int getIndexesCount() { if (indexesBuilder_ == null) { return indexes_.size(); } else { return indexesBuilder_.getCount(); } } /** * * * <pre> * List of indexes in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Index indexes = 1;</code> */ public com.google.cloud.aiplatform.v1beta1.Index getIndexes(int index) { if (indexesBuilder_ == null) { return indexes_.get(index); } else { return indexesBuilder_.getMessage(index); } } /** * * * <pre> * List of indexes in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Index indexes = 1;</code> */ public Builder setIndexes(int index, com.google.cloud.aiplatform.v1beta1.Index value) { if (indexesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureIndexesIsMutable(); indexes_.set(index, value); onChanged(); } else { indexesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * List of indexes in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Index indexes = 1;</code> */ public Builder setIndexes( int index, com.google.cloud.aiplatform.v1beta1.Index.Builder builderForValue) { if (indexesBuilder_ == null) { ensureIndexesIsMutable(); indexes_.set(index, builderForValue.build()); onChanged(); } else { indexesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * List of indexes in the requested page. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Index indexes = 1;</code> */ public Builder addIndexes(com.google.cloud.aiplatform.v1beta1.Index value) { if (indexesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureIndexesIsMutable(); indexes_.add(value); onChanged(); } else { indexesBuilder_.addMessage(value); } return this; } /** * * * <pre> * List of indexes in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Index indexes = 1;</code> */ public Builder addIndexes(int index, com.google.cloud.aiplatform.v1beta1.Index value) { if (indexesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureIndexesIsMutable(); indexes_.add(index, value); onChanged(); } else { indexesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * List of indexes in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Index indexes = 1;</code> */ public Builder addIndexes(com.google.cloud.aiplatform.v1beta1.Index.Builder builderForValue) { if (indexesBuilder_ == null) { ensureIndexesIsMutable(); indexes_.add(builderForValue.build()); onChanged(); } else { indexesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * List of indexes in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Index indexes = 1;</code> */ public Builder addIndexes( int index, com.google.cloud.aiplatform.v1beta1.Index.Builder builderForValue) { if (indexesBuilder_ == null) { ensureIndexesIsMutable(); indexes_.add(index, builderForValue.build()); onChanged(); } else { indexesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * List of indexes in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Index indexes = 1;</code> */ public Builder addAllIndexes( java.lang.Iterable<? 
extends com.google.cloud.aiplatform.v1beta1.Index> values) { if (indexesBuilder_ == null) { ensureIndexesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, indexes_); onChanged(); } else { indexesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * List of indexes in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Index indexes = 1;</code> */ public Builder clearIndexes() { if (indexesBuilder_ == null) { indexes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { indexesBuilder_.clear(); } return this; } /** * * * <pre> * List of indexes in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Index indexes = 1;</code> */ public Builder removeIndexes(int index) { if (indexesBuilder_ == null) { ensureIndexesIsMutable(); indexes_.remove(index); onChanged(); } else { indexesBuilder_.remove(index); } return this; } /** * * * <pre> * List of indexes in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Index indexes = 1;</code> */ public com.google.cloud.aiplatform.v1beta1.Index.Builder getIndexesBuilder(int index) { return getIndexesFieldBuilder().getBuilder(index); } /** * * * <pre> * List of indexes in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Index indexes = 1;</code> */ public com.google.cloud.aiplatform.v1beta1.IndexOrBuilder getIndexesOrBuilder(int index) { if (indexesBuilder_ == null) { return indexes_.get(index); } else { return indexesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * List of indexes in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Index indexes = 1;</code> */ public java.util.List<? 
extends com.google.cloud.aiplatform.v1beta1.IndexOrBuilder> getIndexesOrBuilderList() { if (indexesBuilder_ != null) { return indexesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(indexes_); } } /** * * * <pre> * List of indexes in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Index indexes = 1;</code> */ public com.google.cloud.aiplatform.v1beta1.Index.Builder addIndexesBuilder() { return getIndexesFieldBuilder() .addBuilder(com.google.cloud.aiplatform.v1beta1.Index.getDefaultInstance()); } /** * * * <pre> * List of indexes in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Index indexes = 1;</code> */ public com.google.cloud.aiplatform.v1beta1.Index.Builder addIndexesBuilder(int index) { return getIndexesFieldBuilder() .addBuilder(index, com.google.cloud.aiplatform.v1beta1.Index.getDefaultInstance()); } /** * * * <pre> * List of indexes in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Index indexes = 1;</code> */ public java.util.List<com.google.cloud.aiplatform.v1beta1.Index.Builder> getIndexesBuilderList() { return getIndexesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.Index, com.google.cloud.aiplatform.v1beta1.Index.Builder, com.google.cloud.aiplatform.v1beta1.IndexOrBuilder> getIndexesFieldBuilder() { if (indexesBuilder_ == null) { indexesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.Index, com.google.cloud.aiplatform.v1beta1.Index.Builder, com.google.cloud.aiplatform.v1beta1.IndexOrBuilder>( indexes_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); indexes_ = null; } return indexesBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to retrieve next page of results. 
* Pass to * [ListIndexesRequest.page_token][google.cloud.aiplatform.v1beta1.ListIndexesRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token to retrieve next page of results. * Pass to * [ListIndexesRequest.page_token][google.cloud.aiplatform.v1beta1.ListIndexesRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token to retrieve next page of results. * Pass to * [ListIndexesRequest.page_token][google.cloud.aiplatform.v1beta1.ListIndexesRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token to retrieve next page of results. * Pass to * [ListIndexesRequest.page_token][google.cloud.aiplatform.v1beta1.ListIndexesRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. 
*/ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token to retrieve next page of results. * Pass to * [ListIndexesRequest.page_token][google.cloud.aiplatform.v1beta1.ListIndexesRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.ListIndexesResponse) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.ListIndexesResponse) private static final com.google.cloud.aiplatform.v1beta1.ListIndexesResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.ListIndexesResponse(); } public static com.google.cloud.aiplatform.v1beta1.ListIndexesResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListIndexesResponse> PARSER = new com.google.protobuf.AbstractParser<ListIndexesResponse>() { @java.lang.Override public ListIndexesResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { 
builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListIndexesResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListIndexesResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ListIndexesResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,644
java-datacatalog/proto-google-cloud-datacatalog-v1/src/main/java/com/google/cloud/datacatalog/v1/ListPolicyTagsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/datacatalog/v1/policytagmanager.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.datacatalog.v1; /** * * * <pre> * Response message for * [ListPolicyTags][google.cloud.datacatalog.v1.PolicyTagManager.ListPolicyTags]. * </pre> * * Protobuf type {@code google.cloud.datacatalog.v1.ListPolicyTagsResponse} */ public final class ListPolicyTagsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.datacatalog.v1.ListPolicyTagsResponse) ListPolicyTagsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListPolicyTagsResponse.newBuilder() to construct. 
private ListPolicyTagsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListPolicyTagsResponse() { policyTags_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListPolicyTagsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.datacatalog.v1.PolicyTagManagerProto .internal_static_google_cloud_datacatalog_v1_ListPolicyTagsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.datacatalog.v1.PolicyTagManagerProto .internal_static_google_cloud_datacatalog_v1_ListPolicyTagsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.datacatalog.v1.ListPolicyTagsResponse.class, com.google.cloud.datacatalog.v1.ListPolicyTagsResponse.Builder.class); } public static final int POLICY_TAGS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.datacatalog.v1.PolicyTag> policyTags_; /** * * * <pre> * The policy tags that belong to the taxonomy. * </pre> * * <code>repeated .google.cloud.datacatalog.v1.PolicyTag policy_tags = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.datacatalog.v1.PolicyTag> getPolicyTagsList() { return policyTags_; } /** * * * <pre> * The policy tags that belong to the taxonomy. * </pre> * * <code>repeated .google.cloud.datacatalog.v1.PolicyTag policy_tags = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.datacatalog.v1.PolicyTagOrBuilder> getPolicyTagsOrBuilderList() { return policyTags_; } /** * * * <pre> * The policy tags that belong to the taxonomy. 
* </pre> * * <code>repeated .google.cloud.datacatalog.v1.PolicyTag policy_tags = 1;</code> */ @java.lang.Override public int getPolicyTagsCount() { return policyTags_.size(); } /** * * * <pre> * The policy tags that belong to the taxonomy. * </pre> * * <code>repeated .google.cloud.datacatalog.v1.PolicyTag policy_tags = 1;</code> */ @java.lang.Override public com.google.cloud.datacatalog.v1.PolicyTag getPolicyTags(int index) { return policyTags_.get(index); } /** * * * <pre> * The policy tags that belong to the taxonomy. * </pre> * * <code>repeated .google.cloud.datacatalog.v1.PolicyTag policy_tags = 1;</code> */ @java.lang.Override public com.google.cloud.datacatalog.v1.PolicyTagOrBuilder getPolicyTagsOrBuilder(int index) { return policyTags_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Pagination token of the next results page. Empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * Pagination token of the next results page. Empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < policyTags_.size(); i++) { output.writeMessage(1, policyTags_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < policyTags_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, policyTags_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.datacatalog.v1.ListPolicyTagsResponse)) { return super.equals(obj); } com.google.cloud.datacatalog.v1.ListPolicyTagsResponse other = (com.google.cloud.datacatalog.v1.ListPolicyTagsResponse) obj; if (!getPolicyTagsList().equals(other.getPolicyTagsList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) 
return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getPolicyTagsCount() > 0) { hash = (37 * hash) + POLICY_TAGS_FIELD_NUMBER; hash = (53 * hash) + getPolicyTagsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.datacatalog.v1.ListPolicyTagsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datacatalog.v1.ListPolicyTagsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datacatalog.v1.ListPolicyTagsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datacatalog.v1.ListPolicyTagsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datacatalog.v1.ListPolicyTagsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datacatalog.v1.ListPolicyTagsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datacatalog.v1.ListPolicyTagsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.datacatalog.v1.ListPolicyTagsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.datacatalog.v1.ListPolicyTagsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.datacatalog.v1.ListPolicyTagsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.datacatalog.v1.ListPolicyTagsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.datacatalog.v1.ListPolicyTagsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.datacatalog.v1.ListPolicyTagsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); 
} @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for * [ListPolicyTags][google.cloud.datacatalog.v1.PolicyTagManager.ListPolicyTags]. * </pre> * * Protobuf type {@code google.cloud.datacatalog.v1.ListPolicyTagsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.datacatalog.v1.ListPolicyTagsResponse) com.google.cloud.datacatalog.v1.ListPolicyTagsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.datacatalog.v1.PolicyTagManagerProto .internal_static_google_cloud_datacatalog_v1_ListPolicyTagsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.datacatalog.v1.PolicyTagManagerProto .internal_static_google_cloud_datacatalog_v1_ListPolicyTagsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.datacatalog.v1.ListPolicyTagsResponse.class, com.google.cloud.datacatalog.v1.ListPolicyTagsResponse.Builder.class); } // Construct using com.google.cloud.datacatalog.v1.ListPolicyTagsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (policyTagsBuilder_ == null) { policyTags_ = java.util.Collections.emptyList(); } else { policyTags_ = null; policyTagsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.datacatalog.v1.PolicyTagManagerProto .internal_static_google_cloud_datacatalog_v1_ListPolicyTagsResponse_descriptor; } @java.lang.Override public com.google.cloud.datacatalog.v1.ListPolicyTagsResponse getDefaultInstanceForType() { return com.google.cloud.datacatalog.v1.ListPolicyTagsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.datacatalog.v1.ListPolicyTagsResponse build() { com.google.cloud.datacatalog.v1.ListPolicyTagsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.datacatalog.v1.ListPolicyTagsResponse buildPartial() { com.google.cloud.datacatalog.v1.ListPolicyTagsResponse result = new com.google.cloud.datacatalog.v1.ListPolicyTagsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.datacatalog.v1.ListPolicyTagsResponse result) { if (policyTagsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { policyTags_ = java.util.Collections.unmodifiableList(policyTags_); bitField0_ = (bitField0_ & ~0x00000001); } result.policyTags_ = policyTags_; } else { result.policyTags_ = policyTagsBuilder_.build(); } } private void buildPartial0(com.google.cloud.datacatalog.v1.ListPolicyTagsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } 
@java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.datacatalog.v1.ListPolicyTagsResponse) { return mergeFrom((com.google.cloud.datacatalog.v1.ListPolicyTagsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.datacatalog.v1.ListPolicyTagsResponse other) { if (other == com.google.cloud.datacatalog.v1.ListPolicyTagsResponse.getDefaultInstance()) return this; if (policyTagsBuilder_ == null) { if (!other.policyTags_.isEmpty()) { if (policyTags_.isEmpty()) { policyTags_ = other.policyTags_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensurePolicyTagsIsMutable(); policyTags_.addAll(other.policyTags_); } onChanged(); } } else { if (!other.policyTags_.isEmpty()) { if (policyTagsBuilder_.isEmpty()) { policyTagsBuilder_.dispose(); policyTagsBuilder_ = null; policyTags_ = other.policyTags_; bitField0_ = (bitField0_ & ~0x00000001); policyTagsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getPolicyTagsFieldBuilder() : null; } else { policyTagsBuilder_.addAllMessages(other.policyTags_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.datacatalog.v1.PolicyTag m = input.readMessage( com.google.cloud.datacatalog.v1.PolicyTag.parser(), extensionRegistry); if (policyTagsBuilder_ == null) { ensurePolicyTagsIsMutable(); policyTags_.add(m); } else { policyTagsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.datacatalog.v1.PolicyTag> policyTags_ = java.util.Collections.emptyList(); private void ensurePolicyTagsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { policyTags_ = new java.util.ArrayList<com.google.cloud.datacatalog.v1.PolicyTag>(policyTags_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.datacatalog.v1.PolicyTag, com.google.cloud.datacatalog.v1.PolicyTag.Builder, 
com.google.cloud.datacatalog.v1.PolicyTagOrBuilder> policyTagsBuilder_; /** * * * <pre> * The policy tags that belong to the taxonomy. * </pre> * * <code>repeated .google.cloud.datacatalog.v1.PolicyTag policy_tags = 1;</code> */ public java.util.List<com.google.cloud.datacatalog.v1.PolicyTag> getPolicyTagsList() { if (policyTagsBuilder_ == null) { return java.util.Collections.unmodifiableList(policyTags_); } else { return policyTagsBuilder_.getMessageList(); } } /** * * * <pre> * The policy tags that belong to the taxonomy. * </pre> * * <code>repeated .google.cloud.datacatalog.v1.PolicyTag policy_tags = 1;</code> */ public int getPolicyTagsCount() { if (policyTagsBuilder_ == null) { return policyTags_.size(); } else { return policyTagsBuilder_.getCount(); } } /** * * * <pre> * The policy tags that belong to the taxonomy. * </pre> * * <code>repeated .google.cloud.datacatalog.v1.PolicyTag policy_tags = 1;</code> */ public com.google.cloud.datacatalog.v1.PolicyTag getPolicyTags(int index) { if (policyTagsBuilder_ == null) { return policyTags_.get(index); } else { return policyTagsBuilder_.getMessage(index); } } /** * * * <pre> * The policy tags that belong to the taxonomy. * </pre> * * <code>repeated .google.cloud.datacatalog.v1.PolicyTag policy_tags = 1;</code> */ public Builder setPolicyTags(int index, com.google.cloud.datacatalog.v1.PolicyTag value) { if (policyTagsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePolicyTagsIsMutable(); policyTags_.set(index, value); onChanged(); } else { policyTagsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The policy tags that belong to the taxonomy. 
* </pre> * * <code>repeated .google.cloud.datacatalog.v1.PolicyTag policy_tags = 1;</code> */ public Builder setPolicyTags( int index, com.google.cloud.datacatalog.v1.PolicyTag.Builder builderForValue) { if (policyTagsBuilder_ == null) { ensurePolicyTagsIsMutable(); policyTags_.set(index, builderForValue.build()); onChanged(); } else { policyTagsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The policy tags that belong to the taxonomy. * </pre> * * <code>repeated .google.cloud.datacatalog.v1.PolicyTag policy_tags = 1;</code> */ public Builder addPolicyTags(com.google.cloud.datacatalog.v1.PolicyTag value) { if (policyTagsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePolicyTagsIsMutable(); policyTags_.add(value); onChanged(); } else { policyTagsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The policy tags that belong to the taxonomy. * </pre> * * <code>repeated .google.cloud.datacatalog.v1.PolicyTag policy_tags = 1;</code> */ public Builder addPolicyTags(int index, com.google.cloud.datacatalog.v1.PolicyTag value) { if (policyTagsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePolicyTagsIsMutable(); policyTags_.add(index, value); onChanged(); } else { policyTagsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The policy tags that belong to the taxonomy. * </pre> * * <code>repeated .google.cloud.datacatalog.v1.PolicyTag policy_tags = 1;</code> */ public Builder addPolicyTags( com.google.cloud.datacatalog.v1.PolicyTag.Builder builderForValue) { if (policyTagsBuilder_ == null) { ensurePolicyTagsIsMutable(); policyTags_.add(builderForValue.build()); onChanged(); } else { policyTagsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The policy tags that belong to the taxonomy. 
* </pre> * * <code>repeated .google.cloud.datacatalog.v1.PolicyTag policy_tags = 1;</code> */ public Builder addPolicyTags( int index, com.google.cloud.datacatalog.v1.PolicyTag.Builder builderForValue) { if (policyTagsBuilder_ == null) { ensurePolicyTagsIsMutable(); policyTags_.add(index, builderForValue.build()); onChanged(); } else { policyTagsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The policy tags that belong to the taxonomy. * </pre> * * <code>repeated .google.cloud.datacatalog.v1.PolicyTag policy_tags = 1;</code> */ public Builder addAllPolicyTags( java.lang.Iterable<? extends com.google.cloud.datacatalog.v1.PolicyTag> values) { if (policyTagsBuilder_ == null) { ensurePolicyTagsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, policyTags_); onChanged(); } else { policyTagsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The policy tags that belong to the taxonomy. * </pre> * * <code>repeated .google.cloud.datacatalog.v1.PolicyTag policy_tags = 1;</code> */ public Builder clearPolicyTags() { if (policyTagsBuilder_ == null) { policyTags_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { policyTagsBuilder_.clear(); } return this; } /** * * * <pre> * The policy tags that belong to the taxonomy. * </pre> * * <code>repeated .google.cloud.datacatalog.v1.PolicyTag policy_tags = 1;</code> */ public Builder removePolicyTags(int index) { if (policyTagsBuilder_ == null) { ensurePolicyTagsIsMutable(); policyTags_.remove(index); onChanged(); } else { policyTagsBuilder_.remove(index); } return this; } /** * * * <pre> * The policy tags that belong to the taxonomy. 
* </pre> * * <code>repeated .google.cloud.datacatalog.v1.PolicyTag policy_tags = 1;</code> */ public com.google.cloud.datacatalog.v1.PolicyTag.Builder getPolicyTagsBuilder(int index) { return getPolicyTagsFieldBuilder().getBuilder(index); } /** * * * <pre> * The policy tags that belong to the taxonomy. * </pre> * * <code>repeated .google.cloud.datacatalog.v1.PolicyTag policy_tags = 1;</code> */ public com.google.cloud.datacatalog.v1.PolicyTagOrBuilder getPolicyTagsOrBuilder(int index) { if (policyTagsBuilder_ == null) { return policyTags_.get(index); } else { return policyTagsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The policy tags that belong to the taxonomy. * </pre> * * <code>repeated .google.cloud.datacatalog.v1.PolicyTag policy_tags = 1;</code> */ public java.util.List<? extends com.google.cloud.datacatalog.v1.PolicyTagOrBuilder> getPolicyTagsOrBuilderList() { if (policyTagsBuilder_ != null) { return policyTagsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(policyTags_); } } /** * * * <pre> * The policy tags that belong to the taxonomy. * </pre> * * <code>repeated .google.cloud.datacatalog.v1.PolicyTag policy_tags = 1;</code> */ public com.google.cloud.datacatalog.v1.PolicyTag.Builder addPolicyTagsBuilder() { return getPolicyTagsFieldBuilder() .addBuilder(com.google.cloud.datacatalog.v1.PolicyTag.getDefaultInstance()); } /** * * * <pre> * The policy tags that belong to the taxonomy. * </pre> * * <code>repeated .google.cloud.datacatalog.v1.PolicyTag policy_tags = 1;</code> */ public com.google.cloud.datacatalog.v1.PolicyTag.Builder addPolicyTagsBuilder(int index) { return getPolicyTagsFieldBuilder() .addBuilder(index, com.google.cloud.datacatalog.v1.PolicyTag.getDefaultInstance()); } /** * * * <pre> * The policy tags that belong to the taxonomy. 
* </pre> * * <code>repeated .google.cloud.datacatalog.v1.PolicyTag policy_tags = 1;</code> */ public java.util.List<com.google.cloud.datacatalog.v1.PolicyTag.Builder> getPolicyTagsBuilderList() { return getPolicyTagsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.datacatalog.v1.PolicyTag, com.google.cloud.datacatalog.v1.PolicyTag.Builder, com.google.cloud.datacatalog.v1.PolicyTagOrBuilder> getPolicyTagsFieldBuilder() { if (policyTagsBuilder_ == null) { policyTagsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.datacatalog.v1.PolicyTag, com.google.cloud.datacatalog.v1.PolicyTag.Builder, com.google.cloud.datacatalog.v1.PolicyTagOrBuilder>( policyTags_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); policyTags_ = null; } return policyTagsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Pagination token of the next results page. Empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Pagination token of the next results page. Empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Pagination token of the next results page. Empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Pagination token of the next results page. Empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Pagination token of the next results page. Empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.datacatalog.v1.ListPolicyTagsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1.ListPolicyTagsResponse) private static final com.google.cloud.datacatalog.v1.ListPolicyTagsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.datacatalog.v1.ListPolicyTagsResponse(); } public static com.google.cloud.datacatalog.v1.ListPolicyTagsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListPolicyTagsResponse> PARSER = new com.google.protobuf.AbstractParser<ListPolicyTagsResponse>() { @java.lang.Override public ListPolicyTagsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public 
static com.google.protobuf.Parser<ListPolicyTagsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListPolicyTagsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.datacatalog.v1.ListPolicyTagsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/ozone
36,774
hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestOMDbCheckpointServlet.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.ozone.om; import static org.apache.hadoop.hdds.recon.ReconConfig.ConfigStrings.OZONE_RECON_KERBEROS_PRINCIPAL_KEY; import static org.apache.hadoop.hdds.utils.HddsServerUtil.OZONE_RATIS_SNAPSHOT_COMPLETE_FLAG_NAME; import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_ACL_ENABLED; import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_ADMINISTRATORS; import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_ADMINISTRATORS_WILDCARD; import static org.apache.hadoop.ozone.OzoneConsts.DB_COMPACTION_SST_BACKUP_DIR; import static org.apache.hadoop.ozone.OzoneConsts.MULTIPART_FORM_DATA_BOUNDARY; import static org.apache.hadoop.ozone.OzoneConsts.OM_DB_NAME; import static org.apache.hadoop.ozone.OzoneConsts.OM_KEY_PREFIX; import static org.apache.hadoop.ozone.OzoneConsts.OM_SNAPSHOT_DIFF_DIR; import static org.apache.hadoop.ozone.OzoneConsts.OM_SNAPSHOT_DIR; import static org.apache.hadoop.ozone.OzoneConsts.OZONE_DB_CHECKPOINT_INCLUDE_SNAPSHOT_DATA; import static org.apache.hadoop.ozone.OzoneConsts.OZONE_DB_CHECKPOINT_REQUEST_FLUSH; import static org.apache.hadoop.ozone.OzoneConsts.OZONE_DB_CHECKPOINT_REQUEST_TO_EXCLUDE_SST; import static 
org.apache.hadoop.ozone.om.OMConfigKeys.OZONE_OM_HTTP_AUTH_TYPE; import static org.apache.hadoop.ozone.om.OmSnapshotManager.OM_HARDLINK_FILE; import static org.apache.hadoop.ozone.om.OmSnapshotManager.getSnapshotPath; import static org.apache.hadoop.ozone.om.snapshot.OmSnapshotUtils.DATA_PREFIX; import static org.apache.hadoop.ozone.om.snapshot.OmSnapshotUtils.DATA_SUFFIX; import static org.apache.hadoop.ozone.om.snapshot.OmSnapshotUtils.truncateFileName; import static org.apache.ozone.rocksdiff.RocksDBCheckpointDiffer.COMPACTION_LOG_FILE_NAME_SUFFIX; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.any; import static org.mockito.Mockito.anyBoolean; import static org.mockito.Mockito.anyInt; import static org.mockito.Mockito.anyString; import static org.mockito.Mockito.doCallRealMethod; import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import com.google.common.collect.Sets; import java.io.ByteArrayInputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.security.Principal; import java.util.ArrayList; 
import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Set; import java.util.UUID; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; import java.util.stream.Stream; import javax.servlet.ServletContext; import javax.servlet.ServletInputStream; import javax.servlet.ServletOutputStream; import javax.servlet.WriteListener; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.hdds.client.ReplicationConfig; import org.apache.hadoop.hdds.client.ReplicationFactor; import org.apache.hadoop.hdds.client.ReplicationType; import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.hdds.utils.IOUtils; import org.apache.hadoop.hdds.utils.db.DBCheckpoint; import org.apache.hadoop.hdds.utils.db.DBStore; import org.apache.hadoop.ozone.MiniOzoneCluster; import org.apache.hadoop.ozone.OzoneConsts; import org.apache.hadoop.ozone.TestDataUtil; import org.apache.hadoop.ozone.client.OzoneBucket; import org.apache.hadoop.ozone.client.OzoneClient; import org.apache.hadoop.ozone.lock.BootstrapStateHandler; import org.apache.hadoop.ozone.om.helpers.SnapshotInfo; import org.apache.hadoop.ozone.om.protocol.OzoneManagerProtocol; import org.apache.hadoop.security.UserGroupInformation; import org.apache.ozone.test.GenericTestUtils; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.io.TempDir; /** * Class used for testing the OM DB Checkpoint provider servlet. 
*/ public class TestOMDbCheckpointServlet { public static final String JAVA_IO_TMPDIR = "java.io.tmpdir"; private OzoneConfiguration conf; private File tempFile; private ServletOutputStream servletOutputStream; private MiniOzoneCluster cluster = null; private OzoneClient client; private OzoneManager om; private OMMetrics omMetrics = null; private HttpServletRequest requestMock = null; private HttpServletResponse responseMock = null; private OMDBCheckpointServlet omDbCheckpointServletMock = null; private File metaDir; private String snapshotDirName; private String snapshotDirName2; private Path compactionDirPath; private DBCheckpoint dbCheckpoint; @TempDir private Path folder; private static final String FABRICATED_FILE_NAME = "fabricatedFile.sst"; private static final AtomicInteger COUNTER = new AtomicInteger(); @BeforeEach void init() throws Exception { conf = new OzoneConfiguration(); } @AfterEach void shutdown() { IOUtils.closeQuietly(client, cluster); } private void setupCluster() throws Exception { cluster = MiniOzoneCluster.newBuilder(conf) .setNumDatanodes(1) .build(); cluster.waitForClusterToBeReady(); client = cluster.newClient(); om = cluster.getOzoneManager(); omMetrics = om.getMetrics(); } private void setupMocks() throws Exception { final Path tempPath = folder.resolve("temp" + COUNTER.incrementAndGet() + ".tar"); tempFile = tempPath.toFile(); servletOutputStream = new ServletOutputStream() { private final OutputStream fileOutputStream = Files.newOutputStream(tempPath); @Override public boolean isReady() { return true; } @Override public void setWriteListener(WriteListener writeListener) { } @Override public void close() throws IOException { fileOutputStream.close(); super.close(); } @Override public void write(int b) throws IOException { fileOutputStream.write(b); } }; omDbCheckpointServletMock = mock(OMDBCheckpointServlet.class); BootstrapStateHandler.Lock lock = new OMDBCheckpointServlet.Lock(om); 
doCallRealMethod().when(omDbCheckpointServletMock).init(); assertNull( doCallRealMethod().when(omDbCheckpointServletMock).getDbStore()); requestMock = mock(HttpServletRequest.class); // Return current user short name when asked when(requestMock.getRemoteUser()) .thenReturn(UserGroupInformation.getCurrentUser().getShortUserName()); responseMock = mock(HttpServletResponse.class); ServletContext servletContextMock = mock(ServletContext.class); when(omDbCheckpointServletMock.getServletContext()) .thenReturn(servletContextMock); when(servletContextMock.getAttribute(OzoneConsts.OM_CONTEXT_ATTRIBUTE)) .thenReturn(om); when(requestMock.getParameter(OZONE_DB_CHECKPOINT_REQUEST_FLUSH)) .thenReturn("true"); doCallRealMethod().when(omDbCheckpointServletMock).doGet(requestMock, responseMock); doCallRealMethod().when(omDbCheckpointServletMock).doPost(requestMock, responseMock); doCallRealMethod().when(omDbCheckpointServletMock) .writeDbDataToStream(any(), any(), any(), any(), any()); when(omDbCheckpointServletMock.getBootstrapStateLock()) .thenReturn(lock); doCallRealMethod().when(omDbCheckpointServletMock).getCheckpoint(any(), anyBoolean()); doCallRealMethod().when(omDbCheckpointServletMock) .processMetadataSnapshotRequest(any(), any(), anyBoolean(), anyBoolean()); } @Test void testWithoutACL() throws Exception { conf.setBoolean(OZONE_ACL_ENABLED, false); conf.set(OZONE_ADMINISTRATORS, OZONE_ADMINISTRATORS_WILDCARD); setupCluster(); testBootstrapLocking(); testEndpoint("POST"); testEndpoint("GET"); testDoPostWithInvalidContentType(); prepSnapshotData(); testWriteDbDataWithoutOmSnapshot(); testWriteDbDataToStream(); testWriteDbDataWithToExcludeFileList(); } private void testEndpoint(String method) throws Exception { setupMocks(); doCallRealMethod().when(omDbCheckpointServletMock).initialize( om.getMetadataManager().getStore(), om.getMetrics().getDBCheckpointMetrics(), om.getAclsEnabled(), om.getOmAdminUsernames(), om.getOmAdminGroups(), om.isSpnegoEnabled()); 
doNothing().when(responseMock).setContentType("application/x-tar"); doNothing().when(responseMock).setHeader(anyString(), anyString()); Set<String> toExcludeList = new HashSet<>(); toExcludeList.add("sstFile1.sst"); toExcludeList.add("sstFile2.sst"); setupHttpMethod(method, toExcludeList); when(responseMock.getOutputStream()).thenReturn(servletOutputStream); omDbCheckpointServletMock.init(); long initialCheckpointCount = omMetrics.getDBCheckpointMetrics().getNumCheckpoints(); doEndpoint(method); assertThat(tempFile.length()).isGreaterThan(0); assertThat(omMetrics.getDBCheckpointMetrics().getLastCheckpointCreationTimeTaken()) .isGreaterThan(0); assertThat(omMetrics.getDBCheckpointMetrics().getLastCheckpointStreamingTimeTaken()) .isGreaterThan(0); assertThat(omMetrics.getDBCheckpointMetrics().getNumCheckpoints()) .isGreaterThan(initialCheckpointCount); verify(omDbCheckpointServletMock).writeDbDataToStream(any(), any(), any(), eq(toExcludeList), any()); } private void testDoPostWithInvalidContentType() throws Exception { setupMocks(); doCallRealMethod().when(omDbCheckpointServletMock).initialize( om.getMetadataManager().getStore(), om.getMetrics().getDBCheckpointMetrics(), om.getAclsEnabled(), om.getOmAdminUsernames(), om.getOmAdminGroups(), om.isSpnegoEnabled()); when(requestMock.getContentType()).thenReturn("application/json"); doNothing().when(responseMock).setContentType("application/x-tar"); doNothing().when(responseMock).setHeader(anyString(), anyString()); when(responseMock.getOutputStream()).thenReturn(servletOutputStream); omDbCheckpointServletMock.init(); omDbCheckpointServletMock.doPost(requestMock, responseMock); verify(responseMock).setStatus(HttpServletResponse.SC_BAD_REQUEST); } @Test void testSpnegoEnabled() throws Exception { conf.setBoolean(OZONE_ACL_ENABLED, true); conf.set(OZONE_ADMINISTRATORS, ""); conf.set(OZONE_OM_HTTP_AUTH_TYPE, "kerberos"); conf.set(OZONE_RECON_KERBEROS_PRINCIPAL_KEY, "recon/host1@REALM"); setupCluster(); 
testSpnegoEnabled("POST"); testSpnegoEnabled("GET"); } private void testSpnegoEnabled(String method) throws Exception { setupMocks(); Collection<String> allowedUsers = new LinkedHashSet<>(om.getOmAdminUsernames()); allowedUsers.add("recon"); doCallRealMethod().when(omDbCheckpointServletMock).initialize( om.getMetadataManager().getStore(), om.getMetrics().getDBCheckpointMetrics(), om.getAclsEnabled(), allowedUsers, Collections.emptySet(), om.isSpnegoEnabled()); omDbCheckpointServletMock.init(); setupHttpMethod(method, new ArrayList<>()); doEndpoint(method); // Response status should be set to 403 Forbidden since there was no user // principal set in the request verify(responseMock, times(1)).setStatus(HttpServletResponse.SC_FORBIDDEN); // Set the principal to DN in request // This should also get denied since only OM and recon // users should be granted access to the servlet Principal userPrincipalMock = mock(Principal.class); when(userPrincipalMock.getName()).thenReturn("dn/localhost@REALM"); when(requestMock.getUserPrincipal()).thenReturn(userPrincipalMock); doEndpoint(method); // Verify that the Response status is set to 403 again for DN user. verify(responseMock, times(2)).setStatus(HttpServletResponse.SC_FORBIDDEN); // Now, set the principal to recon in request when(userPrincipalMock.getName()).thenReturn("recon/localhost@REALM"); when(requestMock.getUserPrincipal()).thenReturn(userPrincipalMock); when(responseMock.getOutputStream()).thenReturn(servletOutputStream); doEndpoint(method); // Recon user should be able to access the servlet and download the // snapshot assertThat(tempFile.length()).isGreaterThan(0); } private void testWriteDbDataToStream() throws Exception { setupMocks(); // Set http param to include snapshot data. when(requestMock.getParameter(OZONE_DB_CHECKPOINT_INCLUDE_SNAPSHOT_DATA)) .thenReturn("true"); // Create a "spy" dbstore keep track of the checkpoint. 
DBStore dbStore = om.getMetadataManager().getStore(); DBStore spyDbStore = spy(dbStore); int metaDirLength = metaDir.toString().length() + 1; String compactionLogDir = dbStore. getRocksDBCheckpointDiffer().getCompactionLogDir(); String sstBackupDir = dbStore. getRocksDBCheckpointDiffer().getSSTBackupDir(); // Create files to be copied from the compaction pause // temp directories so we can confirm they are correctly // copied. The unexpected files should NOT be copied. Path expectedLog = Paths.get(compactionLogDir, "expected" + COMPACTION_LOG_FILE_NAME_SUFFIX); String expectedLogStr = truncateFileName(metaDirLength, expectedLog); Path expectedSst = Paths.get(sstBackupDir, "expected.sst"); String expectedSstStr = truncateFileName(metaDirLength, expectedSst); // put "expected" fabricated files onto the fs before the files get // copied to the temp dir. Files.write(expectedLog, "fabricatedData".getBytes(StandardCharsets.UTF_8)); Files.write(expectedSst, "fabricatedData".getBytes(StandardCharsets.UTF_8)); AtomicReference<DBCheckpoint> realCheckpoint = new AtomicReference<>(); when(spyDbStore.getCheckpoint(true)).thenAnswer(b -> { DBCheckpoint checkpoint = spy(dbStore.getCheckpoint(true)); // Don't delete the checkpoint, because we need to compare it // with the snapshot data. doNothing().when(checkpoint).cleanupCheckpoint(); realCheckpoint.set(checkpoint); return checkpoint; }); // Init the mock with the spyDbstore doCallRealMethod().when(omDbCheckpointServletMock).initialize( any(), any(), eq(false), any(), any(), eq(false)); omDbCheckpointServletMock.initialize( spyDbStore, om.getMetrics().getDBCheckpointMetrics(), false, om.getOmAdminUsernames(), om.getOmAdminGroups(), false); // Get the tarball. 
when(responseMock.getOutputStream()).thenReturn(servletOutputStream); long tmpHardLinkFileCount = tmpHardLinkFileCount(); omDbCheckpointServletMock.doGet(requestMock, responseMock); assertEquals(tmpHardLinkFileCount, tmpHardLinkFileCount()); dbCheckpoint = realCheckpoint.get(); // Untar the file into a temp folder to be examined. String testDirName = folder.resolve("testDir").toString(); int testDirLength = testDirName.length() + 1; String newDbDirName = testDirName + OM_KEY_PREFIX + OM_DB_NAME; int newDbDirLength = newDbDirName.length() + 1; File newDbDir = new File(newDbDirName); assertTrue(newDbDir.mkdirs()); FileUtil.unTar(tempFile, newDbDir); // Move snapshot dir to correct location. assertTrue(new File(newDbDirName, OM_SNAPSHOT_DIR) .renameTo(new File(newDbDir.getParent(), OM_SNAPSHOT_DIR))); // Confirm the checkpoint directories match, (after remove extras). Path checkpointLocation = dbCheckpoint.getCheckpointLocation(); Set<String> initialCheckpointSet = getFiles(checkpointLocation, checkpointLocation.toString().length() + 1); Path finalCheckpointLocation = Paths.get(newDbDirName); Set<String> finalCheckpointSet = getFiles(finalCheckpointLocation, newDbDirLength); assertThat(finalCheckpointSet).withFailMessage("hardlink file exists in checkpoint dir") .contains(OM_HARDLINK_FILE); finalCheckpointSet.remove(OM_HARDLINK_FILE); assertEquals(initialCheckpointSet, finalCheckpointSet); String shortSnapshotLocation = truncateFileName(metaDirLength, Paths.get(snapshotDirName)); String shortSnapshotLocation2 = truncateFileName(metaDirLength, Paths.get(snapshotDirName2)); String shortCompactionDirLocation = truncateFileName(metaDirLength, compactionDirPath); Set<String> finalFullSet = getFiles(Paths.get(testDirName, OM_SNAPSHOT_DIR), testDirLength); // Check each line in the hard link file. 
List<String> fabricatedLinkLines = new ArrayList<>(); try (Stream<String> lines = Files.lines(Paths.get(newDbDirName, OM_HARDLINK_FILE))) { for (String line : lines.collect(Collectors.toList())) { assertFalse(line.contains("CURRENT"), "CURRENT file is not a hard link"); if (line.contains(FABRICATED_FILE_NAME)) { fabricatedLinkLines.add(line); } else { checkLine(shortSnapshotLocation, shortSnapshotLocation2, line); // add links to the final set finalFullSet.add(line.split("\t")[0]); } } } Set<String> directories = Sets.newHashSet( shortSnapshotLocation, shortSnapshotLocation2, shortCompactionDirLocation); checkFabricatedLines(directories, fabricatedLinkLines, testDirName); Set<String> initialFullSet = getFiles(Paths.get(metaDir.toString(), OM_SNAPSHOT_DIR), metaDirLength); assertThat(finalFullSet).contains(expectedLogStr); assertThat(finalFullSet).contains(expectedSstStr); assertEquals(initialFullSet, finalFullSet, "expected snapshot files not found"); } private static long tmpHardLinkFileCount() throws IOException { Path tmpDirPath = Paths.get(System.getProperty(JAVA_IO_TMPDIR)); try (Stream<Path> tmpFiles = Files.list(tmpDirPath)) { return tmpFiles .filter(path -> { String regex = DATA_PREFIX + ".*" + DATA_SUFFIX; return path.getFileName().toString().matches(regex); }) .count(); } } private void testWriteDbDataWithoutOmSnapshot() throws Exception { setupMocks(); doCallRealMethod().when(omDbCheckpointServletMock).initialize( any(), any(), anyBoolean(), any(), any(), anyBoolean()); omDbCheckpointServletMock.init(); // Set http param to exclude snapshot data. when(requestMock.getParameter(OZONE_DB_CHECKPOINT_INCLUDE_SNAPSHOT_DATA)) .thenReturn(null); // Get the tarball. Path tmpdir = folder.resolve("bootstrapData"); try (OutputStream fileOutputStream = Files.newOutputStream(tempFile.toPath())) { omDbCheckpointServletMock.writeDbDataToStream(dbCheckpoint, requestMock, fileOutputStream, new HashSet<>(), tmpdir); } // Untar the file into a temp folder to be examined. 
String testDirName = folder.resolve("testDir").toString(); int testDirLength = testDirName.length() + 1; FileUtil.unTar(tempFile, new File(testDirName)); // Confirm the checkpoint directories match. Path checkpointLocation = dbCheckpoint.getCheckpointLocation(); Set<String> initialCheckpointSet = getFiles(checkpointLocation, checkpointLocation.toString().length() + 1); Path finalCheckpointLocation = Paths.get(testDirName); Set<String> finalCheckpointSet = getFiles(finalCheckpointLocation, testDirLength); assertEquals(initialCheckpointSet, finalCheckpointSet); } private void testWriteDbDataWithToExcludeFileList() throws Exception { setupMocks(); doCallRealMethod().when(omDbCheckpointServletMock).initialize( any(), any(), anyBoolean(), any(), any(), anyBoolean()); omDbCheckpointServletMock.init(); File dummyFile = new File(dbCheckpoint.getCheckpointLocation().toString(), "dummy.sst"); try (OutputStreamWriter writer = new OutputStreamWriter( Files.newOutputStream(dummyFile.toPath()), StandardCharsets.UTF_8)) { writer.write("Dummy data."); } assertTrue(dummyFile.exists()); Set<String> toExcludeList = new HashSet<>(); toExcludeList.add(dummyFile.getName()); // Set http param to exclude snapshot data. when(requestMock.getParameter(OZONE_DB_CHECKPOINT_INCLUDE_SNAPSHOT_DATA)) .thenReturn(null); // Get the tarball. Path tmpdir = folder.resolve("bootstrapData"); try (OutputStream fileOutputStream = Files.newOutputStream(tempFile.toPath())) { omDbCheckpointServletMock.writeDbDataToStream(dbCheckpoint, requestMock, fileOutputStream, toExcludeList, tmpdir); } // Untar the file into a temp folder to be examined. String testDirName = folder.resolve("testDir").toString(); int testDirLength = testDirName.length() + 1; FileUtil.unTar(tempFile, new File(testDirName)); // Confirm the checkpoint directories match. 
Path checkpointLocation = dbCheckpoint.getCheckpointLocation(); Set<String> initialCheckpointSet = getFiles(checkpointLocation, checkpointLocation.toString().length() + 1); Path finalCheckpointLocation = Paths.get(testDirName); Set<String> finalCheckpointSet = getFiles(finalCheckpointLocation, testDirLength); initialCheckpointSet.removeAll(finalCheckpointSet); assertThat(initialCheckpointSet).contains(dummyFile.getName()); } /** * Calls endpoint in regards to parametrized HTTP method. */ private void doEndpoint(String method) { if (method.equals("POST")) { omDbCheckpointServletMock.doPost(requestMock, responseMock); } else { omDbCheckpointServletMock.doGet(requestMock, responseMock); } } /** * Setups HTTP method details depending on parametrized HTTP method. * * @param toExcludeList SST file names to be excluded. * @throws IOException */ private void setupHttpMethod(String method, Collection <String> toExcludeList) throws IOException { if (method.equals("POST")) { setupPostMethod(toExcludeList); } else { setupGetMethod(toExcludeList); } } /** * Setups details for HTTP POST request. * @param toExcludeList SST file names to be excluded. 
* @throws IOException */ private void setupPostMethod(Collection<String> toExcludeList) throws IOException { when(requestMock.getMethod()).thenReturn("POST"); when(requestMock.getContentType()).thenReturn("multipart/form-data; " + "boundary=" + MULTIPART_FORM_DATA_BOUNDARY); // Generate form data String crNl = "\r\n"; String contentDisposition = "Content-Disposition: form-data; name=\"" + OZONE_DB_CHECKPOINT_REQUEST_TO_EXCLUDE_SST + "[]\"" + crNl + crNl; String boundary = "--" + MULTIPART_FORM_DATA_BOUNDARY; String endBoundary = boundary + "--" + crNl; StringBuilder sb = new StringBuilder(); toExcludeList.forEach(sfn -> { sb.append(boundary).append(crNl); sb.append(contentDisposition); sb.append(sfn).append(crNl); }); sb.append(endBoundary); // Use generated form data as input stream to the HTTP request InputStream input = new ByteArrayInputStream( sb.toString().getBytes(StandardCharsets.UTF_8)); ServletInputStream inputStream = mock(ServletInputStream.class); when(requestMock.getInputStream()).thenReturn(inputStream); when(inputStream.read(any(byte[].class), anyInt(), anyInt())) .thenAnswer(invocation -> { byte[] buffer = invocation.getArgument(0); int offset = invocation.getArgument(1); int length = invocation.getArgument(2); return input.read(buffer, offset, length); }); } /** * Setups details for HTTP GET request. * @param toExcludeList SST file names to be excluded. */ private void setupGetMethod(Collection<String> toExcludeList) { when(requestMock.getMethod()).thenReturn("GET"); when(requestMock .getParameterValues(OZONE_DB_CHECKPOINT_REQUEST_TO_EXCLUDE_SST)) .thenReturn(toExcludeList.toArray(new String[0])); } private void prepSnapshotData() throws Exception { metaDir = OMStorage.getOmDbDir(conf); OzoneBucket bucket = TestDataUtil .createVolumeAndBucket(client); // Create dummy keys for snapshotting. 
TestDataUtil.createKey(bucket, UUID.randomUUID().toString(), ReplicationConfig .fromTypeAndFactor(ReplicationType.RATIS, ReplicationFactor.ONE), "content".getBytes(StandardCharsets.UTF_8)); TestDataUtil.createKey(bucket, UUID.randomUUID().toString(), ReplicationConfig .fromTypeAndFactor(ReplicationType.RATIS, ReplicationFactor.ONE), "content".getBytes(StandardCharsets.UTF_8)); snapshotDirName = createSnapshot(bucket.getVolumeName(), bucket.getName()); snapshotDirName2 = createSnapshot(bucket.getVolumeName(), bucket.getName()); // Create dummy snapshot to make sure it is not included. Path fabricatedSnapshot = Paths.get( new File(snapshotDirName).getParent(), "fabricatedSnapshot"); assertTrue(fabricatedSnapshot.toFile().mkdirs()); assertTrue(Paths.get(fabricatedSnapshot.toString(), FABRICATED_FILE_NAME).toFile().createNewFile()); // Create fabricated links to snapshot dirs // to confirm that links are recognized even if // they don't point to the checkpoint directory. Path fabricatedFile = Paths.get(snapshotDirName, FABRICATED_FILE_NAME); Path fabricatedLink = Paths.get(snapshotDirName2, FABRICATED_FILE_NAME); Files.write(fabricatedFile, "fabricatedData".getBytes(StandardCharsets.UTF_8)); Files.createLink(fabricatedLink, fabricatedFile); // Simulate links from the compaction dir. 
compactionDirPath = Paths.get(metaDir.toString(), OM_SNAPSHOT_DIFF_DIR, DB_COMPACTION_SST_BACKUP_DIR); Path fabricatedLink2 = Paths.get(compactionDirPath.toString(), FABRICATED_FILE_NAME); Files.createLink(fabricatedLink2, fabricatedFile); Path currentFile = Paths.get(metaDir.toString(), OM_DB_NAME, "CURRENT"); Path currentLink = Paths.get(compactionDirPath.toString(), "CURRENT"); Files.createLink(currentLink, currentFile); dbCheckpoint = om.getMetadataManager() .getStore() .getCheckpoint(true); } private String createSnapshot(String vname, String bname) throws IOException, InterruptedException, TimeoutException { String snapshotName = UUID.randomUUID().toString(); OzoneManagerProtocol writeClient = client.getObjectStore() .getClientProxy().getOzoneManagerClient(); writeClient.createSnapshot(vname, bname, snapshotName); SnapshotInfo snapshotInfo = om.getMetadataManager().getSnapshotInfoTable() .get(SnapshotInfo.getTableKey(vname, bname, snapshotName)); String snapshotPath = getSnapshotPath(conf, snapshotInfo) + OM_KEY_PREFIX; GenericTestUtils.waitFor(() -> new File(snapshotPath).exists(), 100, 30000); return snapshotPath; } private Set<String> getFiles(Path path, int truncateLength) throws IOException { return getFiles(path, truncateLength, new HashSet<>()); } // Get all files below path, recursively, (skipping fabricated files, archive directory in rocksdb). private Set<String> getFiles(Path path, int truncateLength, Set<String> fileSet) throws IOException { try (Stream<Path> files = Files.list(path)) { for (Path file : files.collect(Collectors.toList())) { if (file.toFile().isDirectory()) { getFiles(file, truncateLength, fileSet); } String filename = String.valueOf(file.getFileName()); Path parentDir = file.getParent(); String parentFileName = parentDir == null ? 
"null" : parentDir.toFile().getName(); if (!filename.startsWith("fabricated") && !filename.startsWith(OZONE_RATIS_SNAPSHOT_COMPLETE_FLAG_NAME) && !(filename.equals("archive") && parentFileName.startsWith("om.db"))) { fileSet.add(truncateFileName(truncateLength, file)); } } } return fileSet; } /** * Confirm fabricated link lines in hardlink file are properly * formatted: "dir1/fabricatedFile dir2/fabricatedFile". * * The "fabricated" files/links are ones I've created by hand to * fully test the code, (as opposed to the "natural" files/links * created by the create snapshot process). * * @param directories Possible directories for the links to exist in. * @param lines Text lines defining the link paths. * @param testDirName Name of test directory. */ private void checkFabricatedLines(Set<String> directories, List<String> lines, String testDirName) { // find the real file String realDir = null; for (String dir: directories) { if (Paths.get(testDirName, dir, FABRICATED_FILE_NAME).toFile().exists()) { assertNull(realDir, "Exactly one copy of the fabricated file exists in the tarball"); realDir = dir; } } assertNotNull(realDir, "real directory found"); directories.remove(realDir); Iterator<String> directoryIterator = directories.iterator(); String dir0 = directoryIterator.next(); String dir1 = directoryIterator.next(); assertNotEquals("link directories are different", dir0, dir1); for (String line : lines) { String[] files = line.split("\t"); assertTrue( files[0].startsWith(dir0) || files[0].startsWith(dir1), "fabricated entry contains valid first directory: " + line); assertTrue(files[1].startsWith(realDir), "fabricated entry contains correct real directory: " + line); Path path0 = Paths.get(files[0]); Path path1 = Paths.get(files[1]); assertEquals(FABRICATED_FILE_NAME, String.valueOf(path0.getFileName()), "fabricated entries contains correct file name: " + line); assertEquals(FABRICATED_FILE_NAME, String.valueOf(path1.getFileName()), "fabricated entries contains 
correct file name: " + line); } } // Validates line in hard link file. should look something like: // "dir1/x.sst x.sst". private void checkLine(String shortSnapshotLocation, String shortSnapshotLocation2, String line) { String[] files = line.split("\t"); assertTrue(files[0].startsWith(shortSnapshotLocation) || files[0].startsWith(shortSnapshotLocation2), "hl entry starts with valid snapshot dir: " + line); String file0 = files[0].substring(shortSnapshotLocation.length() + 1); String file1 = files[1]; assertEquals(file0, file1, "hl filenames are the same"); } private void testBootstrapLocking() throws Exception { // Get the bootstrap state handlers KeyManager keyManager = om.getKeyManager(); BootstrapStateHandler keyDeletingService = keyManager.getDeletingService(); BootstrapStateHandler snapshotDeletingService = keyManager.getSnapshotDeletingService(); BootstrapStateHandler sstFilteringService = keyManager.getSnapshotSstFilteringService(); BootstrapStateHandler differ = om.getMetadataManager() .getStore() .getRocksDBCheckpointDiffer(); ExecutorService executorService = Executors.newCachedThreadPool(); OMDBCheckpointServlet omDbCheckpointServlet = new OMDBCheckpointServlet(); OMDBCheckpointServlet spyServlet = spy(omDbCheckpointServlet); ServletContext servletContext = mock(ServletContext.class); when(servletContext.getAttribute(OzoneConsts.OM_CONTEXT_ATTRIBUTE)) .thenReturn(om); doReturn(servletContext).when(spyServlet).getServletContext(); spyServlet.init(); // Confirm the other handlers are locked out when the bootstrap // servlet takes the lock. 
try (BootstrapStateHandler.Lock ignoredLock = spyServlet.getBootstrapStateLock().lock()) { confirmServletLocksOutOtherHandler(keyDeletingService, executorService); confirmServletLocksOutOtherHandler(snapshotDeletingService, executorService); confirmServletLocksOutOtherHandler(sstFilteringService, executorService); confirmServletLocksOutOtherHandler(differ, executorService); } // Confirm the servlet is locked out when any of the other // handlers takes the lock. confirmOtherHandlerLocksOutServlet(keyDeletingService, spyServlet, executorService); confirmOtherHandlerLocksOutServlet(snapshotDeletingService, spyServlet, executorService); confirmOtherHandlerLocksOutServlet(sstFilteringService, spyServlet, executorService); confirmOtherHandlerLocksOutServlet(differ, spyServlet, executorService); // Confirm that servlet takes the lock when none of the other // handlers have it. Future<Boolean> servletTest = checkLock(spyServlet, executorService); assertTrue(servletTest.get(10000, TimeUnit.MILLISECONDS)); executorService.shutdownNow(); } // Confirms handler can't take look the servlet already has. Assumes // the servlet has already taken the lock. private void confirmServletLocksOutOtherHandler(BootstrapStateHandler handler, ExecutorService executorService) { Future<Boolean> test = checkLock(handler, executorService); // Handler should fail to take the lock because the servlet has taken it. assertThrows(TimeoutException.class, () -> test.get(500, TimeUnit.MILLISECONDS)); } // Confirms Servlet can't take lock when handler has it. private void confirmOtherHandlerLocksOutServlet(BootstrapStateHandler handler, BootstrapStateHandler servlet, ExecutorService executorService) throws InterruptedException { try (BootstrapStateHandler.Lock ignoredLock = handler.getBootstrapStateLock().lock()) { Future<Boolean> test = checkLock(servlet, executorService); // Servlet should fail to lock when other handler has taken it. 
assertThrows(TimeoutException.class, () -> test.get(500, TimeUnit.MILLISECONDS)); } } // Confirm lock is available by having handler take and release it. private Future<Boolean> checkLock(BootstrapStateHandler handler, ExecutorService executorService) { return executorService.submit(() -> { try { handler.getBootstrapStateLock().lock(); handler.getBootstrapStateLock().unlock(); return true; } catch (InterruptedException e) { } return false; }); } }
apache/uima-uimaj
36,953
uimaj-core/src/test/java/org/apache/uima/cas/impl/SelectFsAssert.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.uima.cas.impl; import static java.lang.Math.max; import static java.lang.Math.min; import static java.lang.String.format; import static java.lang.System.currentTimeMillis; import static java.lang.System.identityHashCode; import static java.util.Arrays.asList; import static java.util.Collections.emptyList; import static java.util.Comparator.comparing; import static java.util.Comparator.reverseOrder; import static java.util.stream.Collectors.joining; import static java.util.stream.Collectors.toList; import static org.apache.uima.cas.text.AnnotationPredicateTestData.RelativePosition.COVERED_BY; import static org.apache.uima.cas.text.AnnotationPredicateTestData.RelativePosition.FOLLOWING; import static org.apache.uima.cas.text.AnnotationPredicateTestData.RelativePosition.PRECEDING; import static org.apache.uima.cas.text.AnnotationPredicates.overlapping; import static org.assertj.core.api.Assertions.assertThat; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Random; import java.util.function.IntFunction; import 
org.apache.uima.cas.CAS; import org.apache.uima.cas.FSIterator; import org.apache.uima.cas.SelectFSs; import org.apache.uima.cas.Type; import org.apache.uima.cas.serdes.generators.MultiTypeRandomCasGenerator; import org.apache.uima.cas.text.AnnotationFS; import org.apache.uima.cas.text.AnnotationPredicateAssert.TestCase; import org.apache.uima.cas.text.AnnotationPredicateTestData.RelativePosition; import org.apache.uima.jcas.tcas.Annotation; import org.apache.uima.resource.metadata.TypeDescription; import org.apache.uima.resource.metadata.TypeSystemDescription; import org.apache.uima.util.CasCreationUtils; import org.assertj.core.api.AutoCloseableSoftAssertions; public class SelectFsAssert { private static boolean logAnnotationCreation = false; public static void assertSelectFS(RelativePosition aCondition, RelativeAnnotationPredicate aPredicate, List<TestCase> aTestCases) throws Exception { CAS cas = CasCreationUtils.createCas(); Type type = cas.getAnnotationType(); try (AutoCloseableSoftAssertions softly = new AutoCloseableSoftAssertions()) { for (TestCase testCase : aTestCases) { cas.reset(); // Create annotations Annotation x = (Annotation) cas.createAnnotation(type, 0, 0); Annotation y = (Annotation) cas.createAnnotation(type, 0, 0); // Position the annotations according to the test data testCase.getTest().apply((beginA, endA, beginB, endB) -> { x.setBegin(beginA); x.setEnd(endA); y.setBegin(beginB); y.setEnd(endB); cas.addFsToIndexes(x); cas.addFsToIndexes(y); return true; }); softly.assertThat(aPredicate.apply(cas, type, x, y)).as(testCase.getDescription()) .isEqualTo(testCase.getValidPositions().contains(aCondition)); } } } public static void assertSelectionIsEqualOnRandomData(RelativePosition xRelToY, String description, int aIterations, int aTypes, TypeByContextSelector aExpected, TypeByContextSelectorAsSelection aActual) throws Exception { long lockedSeed = -1l; IntFunction<Integer> annotationsPerIteration = iteration -> iteration * 3; // 
============================================================================================ // Quick overrides for debugging // // 1) Normally you run tests with all of the lines below commented out // 2) If you get a failure, you run the single failing test commenting in the // iterations, annotationsPerIteration and types overrides. Adjust them until you get // a setup that fails with a minimal number of types / annotations. // 3) Note the RANDOM SEED logged to the console and put it into the lockedSeed variable here // and comment it in // // Most of the time, it should be possible to find scenario that fails with max 3 types and 3 // annotations - but it might take a very long time to find such a scenario using the random // approach. So try higher numbers until you find something, then try lowering the number until // you are happy with the scenario size and then implement a unit test for the scenario. In the // unit test, you can then try removing annotations and/or types while still having the scenario // fail. Once you have a minimal setup, debug and fix it. // // The tests should be implemented in the SelectFsTest class. // -------------------------------------------------------------------------------------------- // lockedSeed = 779033675811369l; // aIterations = 100_000; // annotationsPerIteration = iteration -> 2; // aTypes = 1; // ============================================================================================ // Override settings when using a locked seed to be more debugging-friendly aIterations = lockedSeed != -1l ? 1 : aIterations; logAnnotationCreation = lockedSeed != -1; System.out.print("Iteration: "); try { Map<Integer, Integer> sizeCounts = new HashMap<>(); Map<String, Long> timings = new LinkedHashMap<>(); for (int i = 0; i < aIterations; i++) { long seed = lockedSeed != -1 ? 
lockedSeed : System.nanoTime(); Random rnd = new Random(seed); if (i % 10 == 0) { System.out.print(i); if (i > 0 && i % 100 == 0) { System.out.println(); } } else { System.out.print("."); } MultiTypeRandomCasGenerator casRandomizer = MultiTypeRandomCasGenerator.builder() // .withRandomGenerator(rnd) // .withSize(annotationsPerIteration.apply(i + 1)) // .withMinimumAnnotationLength(0) // .withAnnotationLogOutput(logAnnotationCreation) // .withTypeCount(aTypes) // .build(); TypeSystemDescription tsd = casRandomizer.generateTypeSystem(); CAS randomCas = casRandomizer.generateCas(tsd); Map<String, Type> types = new LinkedHashMap<>(); for (TypeDescription td : tsd.getTypes()) { types.put(td.getName(), randomCas.getTypeSystem().getType(td.getName())); } Iterator<Type> ti = types.values().iterator(); Type typeY = ti.next(); Type typeX = ti.hasNext() ? ti.next() : typeY; Type[] typeList = types.values().toArray(new Type[types.size()]); for (Annotation y : randomCas.<Annotation> select(typeY)) { switch (rnd.nextInt(3)) { case 0: // Randomly use a non-indexed annotation for selection so we test both cases (using an // indexed or non-indexed annotation). y = (Annotation) randomCas.createAnnotation(typeList[rnd.nextInt(typeList.length)], y.getBegin(), y.getEnd()); break; case 1: // Randomly use a completely new annotation int begin = rnd.nextInt(100); int end = begin + rnd.nextInt(30); y = (Annotation) randomCas.createAnnotation(typeList[rnd.nextInt(typeList.length)], begin, end); default: // Nothing to do } long tExpected = System.currentTimeMillis(); List<Annotation> expected = aExpected.select(randomCas, typeX, y).stream() .map(a -> (Annotation) a).collect(toList()); timings.compute("filt. full scan", (k, v) -> v == null ? 0l : v + currentTimeMillis() - tExpected); sizeCounts.compute(expected.size(), (k, v) -> v == null ? 
1 : v++); try { assertSelectionAsList(expected, randomCas, aActual, xRelToY, description, typeX, typeY, y, timings); assertInitialPositionIsFirstPosition(expected, randomCas, aActual, xRelToY, description, typeX, typeY, y, timings); assertSelectionAsForwardIteration(expected, randomCas, aActual, xRelToY, description, typeX, typeY, y, timings); assertSelectionAsBackwardIteration(expected, randomCas, aActual, xRelToY, description, typeX, typeY, y, timings); // @formatter:off // Non-Overlapping | Limited | Backwards // false | false | false // @formatter:on assertSelectionAsRandomIteration(rnd, expected, randomCas, aActual, xRelToY, description, typeX, typeY, y, timings); // @formatter:off // Non-Overlapping | Limited | Backwards | Shifted // false | false | false | true // @formatter:on assertShiftedSelectionAsRandomIteration(rnd, expected, randomCas, aActual, xRelToY, description, typeX, typeY, y, timings); // @formatter:off // Non-Overlapping | Limited | Backwards // false | false | true // @formatter:on assertBackwardsSelectionAsRandomIteration(rnd, expected, randomCas, aActual, xRelToY, description, typeX, typeY, y, timings); // @formatter:off // Non-Overlapping | Limited | Backwards // false | true | false // @formatter:on assertLimitedSelectionAsRandomIteration(rnd, expected, randomCas, aActual, xRelToY, description, typeX, typeY, y, timings); // @formatter:off // Non-Overlapping | Limited | Backwards // false | true | true // @formatter:on assertLimitedBackwardsSelectionAsRandomIteration(rnd, expected, randomCas, aActual, xRelToY, description, typeX, typeY, y, timings); // @formatter:off // Non-Overlapping | Limited | Backwards | Shifted // false | true | true | true // @formatter:on assertShiftedLimitedBackwardsSelectionAsRandomIteration(rnd, expected, randomCas, aActual, xRelToY, description, typeX, typeY, y, timings); // @formatter:off // Non-Overlapping | Limited | Backwards // true | false | false // @formatter:on 
assertNonOverlappingSelectionAsRandomIteration(rnd, expected, randomCas, aActual, xRelToY, description, typeX, typeY, y, timings); // @formatter:off // Non-Overlapping | Limited | Backwards // true | false | true // @formatter:on assertBackwardsNonOverlappingSelectionAsRandomIteration(rnd, expected, randomCas, aActual, xRelToY, description, typeX, typeY, y, timings); // @formatter:off // Non-Overlapping | Limited | Backwards // true | true | false // @formatter:on assertLimitedNonOverlappingSelectionAsRandomIteration(rnd, expected, randomCas, aActual, xRelToY, description, typeX, typeY, y, timings); // @formatter:off // Non-Overlapping | Limited | Backwards // true | true | true // @formatter:on assertLimitedBackwardsNonOverlappingSelectionAsRandomIteration(rnd, expected, randomCas, aActual, xRelToY, description, typeX, typeY, y, timings); // @formatter:off // Non-Overlapping | Limited | Backwards | Shifted // true | true | true | true // @formatter:on assertShiftedLimitedBackwardsNonOverlappingSelectionAsRandomIteration(rnd, expected, randomCas, aActual, xRelToY, description, typeX, typeY, y, timings); } catch (Throwable e) { // Set a breakpoint here to halt when an assert above fails. The select triggering the // assert is then re-executed below and you can look into its details. To allow // stopping and re-executing the test, you need to put the displayed random seed into // the static variable RANDOM_SEED at the beginning of the file. Don't forget to // Comment this out again and to re-set the RANDOM_SEED to timer-based when you are // done with debugging. 
System.out.printf("RANDOM SEED: %d%n", seed); aActual.select(randomCas, typeX, y); throw e; } } } System.out.print(aIterations); System.out.print( timings.entrySet().stream().map(e -> format("%s: %4dms", e.getKey(), e.getValue())) .collect(joining(" | ", " (", ")"))); } finally { System.out.println(); } } private static void assertSelectionAsList(List<Annotation> expected, CAS randomCas, TypeByContextSelectorAsSelection aActual, RelativePosition aXRelToY, String description, Type typeX, Type typeY, Annotation y, Map<String, Long> timings) { long t = System.currentTimeMillis(); List<Annotation> listActual = aActual.select(randomCas, typeX, y).asList(); timings.compute("asList", (k, v) -> v == null ? 0l : v + currentTimeMillis() - t); assertThat(listActual) .as("Selecting X of type [%s] %s [%s]@[%d-%d][%d] %s asList%n%s%n", typeX.getName(), aXRelToY, y.getType().getShortName(), y.getBegin(), y.getEnd(), identityHashCode(y), description, casToString(randomCas)) .containsExactlyElementsOf(expected); } private static void assertInitialPositionIsFirstPosition(List<Annotation> expected, CAS randomCas, TypeByContextSelectorAsSelection aActual, RelativePosition aXRelToY, String description, Type typeX, Type typeY, Annotation y, Map<String, Long> timings) { FSIterator<Annotation> it = aActual.select(randomCas, typeX, y).fsIterator(); Annotation initial = it.isValid() ? it.get() : null; it.moveToFirst(); assertThat(it.isValid() ? it.get() : null).as( "Annotation pointed at by iterator initially should match annotation after calling " + "moveToFirst:%n%s%n%s%n" + "Selecting X of type [%s] %s%s [%s]@[%d-%d][%d] iterator forward%n%s%n", initial, it.isValid() ? 
it.get() : null, typeX.getName(), aXRelToY, description, y.getType().getShortName(), y.getBegin(), y.getEnd(), identityHashCode(y), casToString(randomCas)).isEqualTo(initial); } private static void assertSelectionAsForwardIteration(List<Annotation> expected, CAS randomCas, TypeByContextSelectorAsSelection aActual, RelativePosition aXRelToY, String description, Type typeX, Type typeY, Annotation y, Map<String, Long> timings) { List<Annotation> actual = new ArrayList<>(); long t = System.currentTimeMillis(); FSIterator<Annotation> it = aActual.select(randomCas, typeX, y).fsIterator(); it.moveToFirst(); while (it.isValid()) { actual.add(it.get()); it.moveToNext(); } timings.compute("it. >>", (k, v) -> v == null ? 0l : v + currentTimeMillis() - t); assertThat(actual) .as("Selecting X of type [%s] %s [%s]@[%d-%d][%d] %s iterator forward%n%s%n", typeX.getName(), aXRelToY, y.getType().getShortName(), y.getBegin(), y.getEnd(), identityHashCode(y), description, casToString(randomCas)) .containsExactlyElementsOf(expected); } private static void assertSelectionAsBackwardIteration(List<Annotation> expected, CAS randomCas, TypeByContextSelectorAsSelection aActual, RelativePosition aXRelToY, String description, Type typeX, Type typeY, Annotation y, Map<String, Long> timings) { List<Annotation> actual = new ArrayList<>(); long t = System.currentTimeMillis(); FSIterator<Annotation> it = aActual.select(randomCas, typeX, y).fsIterator(); it.moveToLast(); while (it.isValid()) { actual.add(0, it.get()); it.moveToPrevious(); } timings.compute("it. <<", (k, v) -> v == null ? 
0l : v + currentTimeMillis() - t); assertThat(actual) .as("Selecting X of type [%s] %s [%s]@[%d-%d][%s] %s iterator backwards%n%s%n", typeX.getName(), aXRelToY, y.getType().getShortName(), y.getBegin(), y.getEnd(), identityHashCode(y), description, casToString(randomCas)) .containsExactlyElementsOf(expected); } private static void assertLimitedSelectionAsRandomIteration(Random rnd, List<Annotation> expected, CAS randomCas, TypeByContextSelectorAsSelection aActual, RelativePosition aXRelToY, String description, Type typeX, Type typeY, Annotation y, Map<String, Long> timings) { int limit = rnd.nextInt(5); List<Annotation> limitedExpected = limit(expected, limit, aXRelToY); assertSelectionAsRandomIteration(rnd, limitedExpected, randomCas, (cas, type, context) -> aActual.select(cas, type, context).limit(limit), aXRelToY, description + " with limit(" + limit + ")", typeX, typeY, y, timings); } private static void assertLimitedBackwardsSelectionAsRandomIteration(Random rnd, List<Annotation> expected, CAS randomCas, TypeByContextSelectorAsSelection aActual, RelativePosition aXRelToY, String description, Type typeX, Type typeY, Annotation y, Map<String, Long> timings) { int limit = rnd.nextInt(5); // FIXME: Actually... I am pretty sure that all selection types should use the same // precedence for limit/backwards... 
List<Annotation> adjustedExpectation; if (asList(FOLLOWING, PRECEDING).contains(aXRelToY)) { // This works with FOLLOWING / PRECEDING adjustedExpectation = limit(expected, limit, aXRelToY); adjustedExpectation = backwards(adjustedExpectation); } else { // This works with COVERED_BY, COVERING, COLOCATED adjustedExpectation = backwards(expected); adjustedExpectation = limit(adjustedExpectation, limit, aXRelToY); } assertSelectionAsRandomIteration(rnd, adjustedExpectation, randomCas, (cas, type, context) -> aActual.select(cas, type, context).limit(limit).backwards(), aXRelToY, description + " backwards with limit(" + limit + ")", typeX, typeY, y, timings); } private static void assertShiftedLimitedBackwardsSelectionAsRandomIteration(Random rnd, List<Annotation> expected, CAS randomCas, TypeByContextSelectorAsSelection aActual, RelativePosition aXRelToY, String description, Type typeX, Type typeY, Annotation y, Map<String, Long> timings) { // Random shift in the range of [-2, 2] int shift = rnd.nextInt(2) - rnd.nextInt(4); int limit = rnd.nextInt(5); // FIXME: Actually... I am pretty sure that all selection types should use the same // precedence for limit/backwards... 
List<Annotation> adjustedExpectation; if (asList(FOLLOWING, PRECEDING).contains(aXRelToY)) { // This works with FOLLOWING / PRECEDING adjustedExpectation = shifted(expected, shift, aXRelToY); adjustedExpectation = limit(adjustedExpectation, limit, aXRelToY); adjustedExpectation = backwards(adjustedExpectation); } else { // This works with COVERED_BY, COVERING, COLOCATED adjustedExpectation = backwards(expected); adjustedExpectation = shifted(adjustedExpectation, shift, aXRelToY); adjustedExpectation = limit(adjustedExpectation, limit, aXRelToY); } assertSelectionAsRandomIteration(rnd, adjustedExpectation, randomCas, (cas, type, context) -> aActual.select(cas, type, context).shifted(shift).limit(limit) .backwards(), aXRelToY, description + " backwards with limit(" + limit + ") shifted(" + shift + ")", typeX, typeY, y, timings); } private static void assertLimitedNonOverlappingSelectionAsRandomIteration(Random rnd, List<Annotation> expected, CAS randomCas, TypeByContextSelectorAsSelection aActual, RelativePosition aXRelToY, String description, Type typeX, Type typeY, Annotation y, Map<String, Long> timings) { if (!nonOverlappingSupported(aXRelToY)) { return; } int limit = rnd.nextInt(5); List<Annotation> adjustedExpectation = unambiguous(expected); adjustedExpectation = limit(adjustedExpectation, limit, aXRelToY); assertSelectionAsRandomIteration(rnd, adjustedExpectation, randomCas, (cas, type, context) -> aActual.select(cas, type, context).nonOverlapping() .limit(limit), aXRelToY, description + " non-overlapping with limit(" + limit + ")", typeX, typeY, y, timings); } private static void assertLimitedBackwardsNonOverlappingSelectionAsRandomIteration(Random rnd, List<Annotation> expected, CAS randomCas, TypeByContextSelectorAsSelection aActual, RelativePosition aXRelToY, String description, Type typeX, Type typeY, Annotation y, Map<String, Long> timings) { if (!nonOverlappingSupported(aXRelToY)) { return; } int limit = rnd.nextInt(5); // FIXME: Actually... 
I am pretty sure that all selection types should use the same // precedence for limit/backwards... List<Annotation> adjustedExpectation; if (asList(FOLLOWING, PRECEDING).contains(aXRelToY)) { // This works with FOLLOWING / PRECEDING adjustedExpectation = unambiguous(expected); adjustedExpectation = limit(adjustedExpectation, limit, aXRelToY); adjustedExpectation = backwards(adjustedExpectation); } else { // This works with COVERED_BY, COVERING, COLOCATED adjustedExpectation = unambiguous(expected); adjustedExpectation = backwards(adjustedExpectation); adjustedExpectation = limit(adjustedExpectation, limit, aXRelToY); return; } assertSelectionAsRandomIteration(rnd, adjustedExpectation, randomCas, (cas, type, context) -> aActual.select(cas, type, context).nonOverlapping().limit(limit) .backwards(), aXRelToY, description + " non-overlapping backwards with limit(" + limit + ")", typeX, typeY, y, timings); } private static void assertShiftedLimitedBackwardsNonOverlappingSelectionAsRandomIteration( Random rnd, List<Annotation> expected, CAS randomCas, TypeByContextSelectorAsSelection aActual, RelativePosition aXRelToY, String description, Type typeX, Type typeY, Annotation y, Map<String, Long> timings) { if (!nonOverlappingSupported(aXRelToY)) { return; } // Random shift in the range of [-2, 2] int shift = rnd.nextInt(2) - rnd.nextInt(4); int limit = rnd.nextInt(5); // FIXME: Actually... I am pretty sure that all selection types should use the same // precedence for limit/backwards... 
List<Annotation> adjustedExpectation; if (asList(FOLLOWING, PRECEDING).contains(aXRelToY)) { // This works with FOLLOWING / PRECEDING adjustedExpectation = unambiguous(expected); adjustedExpectation = shifted(adjustedExpectation, shift, aXRelToY); adjustedExpectation = limit(adjustedExpectation, limit, aXRelToY); adjustedExpectation = backwards(adjustedExpectation); } else { // This works with COVERED_BY, COVERING, COLOCATED adjustedExpectation = unambiguous(expected); adjustedExpectation = backwards(adjustedExpectation); adjustedExpectation = shifted(adjustedExpectation, shift, aXRelToY); adjustedExpectation = limit(adjustedExpectation, limit, aXRelToY); return; } assertSelectionAsRandomIteration(rnd, adjustedExpectation, randomCas, (cas, type, context) -> aActual.select(cas, type, context).nonOverlapping() .shifted(shift).limit(limit).backwards(), aXRelToY, description + " non-overlapping backwards with limit(" + limit + ") shifted(" + shift + ")", typeX, typeY, y, timings); } private static void assertNonOverlappingSelectionAsRandomIteration(Random rnd, List<Annotation> expected, CAS randomCas, TypeByContextSelectorAsSelection aActual, RelativePosition aXRelToY, String description, Type typeX, Type typeY, Annotation y, Map<String, Long> timings) { if (!nonOverlappingSupported(aXRelToY)) { return; } List<Annotation> adjustedExpectation = unambiguous(expected); assertSelectionAsRandomIteration(rnd, adjustedExpectation, randomCas, (cas, type, context) -> aActual.select(cas, type, context).nonOverlapping(), aXRelToY, " non-overlapping", typeX, typeY, y, timings); } private static void assertBackwardsNonOverlappingSelectionAsRandomIteration(Random rnd, List<Annotation> expected, CAS randomCas, TypeByContextSelectorAsSelection aActual, RelativePosition aXRelToY, String description, Type typeX, Type typeY, Annotation y, Map<String, Long> timings) { if (!nonOverlappingSupported(aXRelToY)) { return; } List<Annotation> adjustedExpectation = 
backwards(unambiguous(expected)); assertSelectionAsRandomIteration(rnd, adjustedExpectation, randomCas, (cas, type, context) -> aActual.select(cas, type, context).nonOverlapping().backwards(), aXRelToY, " backwards non-overlapping", typeX, typeY, y, timings); } private static void assertBackwardsSelectionAsRandomIteration(Random rnd, List<Annotation> expected, CAS randomCas, TypeByContextSelectorAsSelection aActual, RelativePosition aXRelToY, String description, Type typeX, Type typeY, Annotation y, Map<String, Long> timings) { List<Annotation> adjustedExpectation = backwards(expected); assertSelectionAsRandomIteration(rnd, adjustedExpectation, randomCas, (cas, type, context) -> aActual.select(cas, type, context).backwards(), aXRelToY, " backwards", typeX, typeY, y, timings); } private static void assertShiftedSelectionAsRandomIteration(Random rnd, List<Annotation> expected, CAS randomCas, TypeByContextSelectorAsSelection aActual, RelativePosition aXRelToY, String description, Type typeX, Type typeY, Annotation y, Map<String, Long> timings) { // Random shift in the range of [-2, 2] int shift = rnd.nextInt(2) - rnd.nextInt(4); List<Annotation> adjustedExpectation = shifted(expected, shift, aXRelToY); assertSelectionAsRandomIteration(rnd, adjustedExpectation, randomCas, (cas, type, context) -> aActual.select(cas, type, context).shifted(shift), aXRelToY, " shifted by " + shift, typeX, typeY, y, timings); } private static void assertSelectionAsRandomIteration(Random rnd, List<Annotation> expected, CAS randomCas, TypeByContextSelectorAsSelection aActual, RelativePosition aXRelToY, String description, Type typeX, Type typeY, Annotation y, Map<String, Long> timings) { FSIterator<Annotation> it = aActual.select(randomCas, typeX, y).fsIterator(); if (expected.size() == 0) { assertThat(it.isValid()).as( "Selecting X of type [%s] %s [%s]@[%d-%d][%d] %s random iteration%n%s%n" + "Expected is empty but iterator is not invalid", // and points at // [%s]@[%d-%d][%d].", 
typeX.getName(), aXRelToY, y.getType().getShortName(), y.getBegin(), y .getEnd(), identityHashCode(y), description, casToString(randomCas) /* * , it.get().getType(). * getShortName(), * it.get().getBegin(), * it.get().getEnd(), * identityHashCode(it.get()) */).isFalse(); return; } StringBuilder expectedLog = new StringBuilder(); for (int i = 0; i < expected.size(); i++) { Annotation ann = expected.get(i); expectedLog.append( String.format("expected[%d] = %s@[%d-%d] [%d]%n", i, ann.getType().getShortName(), ann.getBegin(), ann.getEnd(), System.identityHashCode(ann))); } int cursor = 0; List<String> history = new ArrayList<>(); while (history.size() < 2) { switch (rnd.nextInt(9)) { case 0: // Move to beginning cursor = 0; it.moveToFirst(); history.add(format("[%d][%d] Moved to first", history.size(), cursor)); break; case 1: // Move to end cursor = expected.size() - 1; it.moveToLast(); history.add(format("[%d][%d] Moved to last", history.size(), cursor)); break; case 2: // Move to next case 3: // Move to next case 4: // Move to next if (cursor < expected.size() - 1) { cursor++; it.moveToNext(); history.add(format("[%d][%d] Moved to next", history.size(), cursor)); } break; case 5: // Move to next case 6: // Move to prev case 7: // Move to prev if (cursor > 0) { cursor--; it.moveToPrevious(); history.add(format("[%d][%d] Moved to previous", history.size(), cursor)); } break; case 8: // Move to specific FS case 9: // Move to specific FS cursor = rnd.nextInt(expected.size()); it.moveTo(expected.get(cursor)); history.add(format("[%d][%d] Moved to FS #%d [%d]", history.size(), cursor, cursor, System.identityHashCode(expected.get(cursor)))); break; } assertThat(it.isValid()).as( "Selecting X of type [%s] %s [%s]@[%d-%d][%d] %s random iteration%n%s%n" + "%s%nHistory:%n%s%n%nValidity mismatch.", typeX.getName(), aXRelToY, y.getType().getShortName(), y.getBegin(), y.getEnd(), identityHashCode(y), description, casToString(randomCas), expectedLog, 
history.stream().collect(joining("\n"))).isTrue(); assertThat(it.get()) .as("Selecting X of type [%s] %s [%s]@[%d-%d][%d] %s random iteration%n%s%n" + "%s%nHistory:%n%s%n%nExpectation mismatch. ", typeX.getName(), aXRelToY, y.getType().getShortName(), y.getBegin(), y.getEnd(), identityHashCode(y), description, casToString(randomCas), expectedLog, history.stream().collect(joining("\n"))) // We do not compare the exact annotation here because the moveTo operation moves to // the first annotation that matches the target annotation. If there are multiple // annoations with the same type/begin/end, then it moves to the first one of these, // even if the cursor is e.g. pointing to the second one. .isEqualToComparingOnlyGivenFields(expected.get(cursor), "begin", "end"); // Since the moveTo operation may not select the annotation the cursor is pointing to but // instead the first matching one, we may have to adjust the cursor after the moveTo operation // to keep the cursor in sync with the actual iterator behavior (e.g. which elements are // then selected after moveToNext or moveToPrevious operations. 
if (cursor != expected.indexOf(it.get())) { cursor = expected.indexOf(it.get()); history.add(format("[%d][%d] Adjusted cursor #%d", history.size(), cursor, cursor)); } } } private static String casToString(CAS aCas) { int MAX_ANNOTATIONS = 100; if (aCas.select().count() > MAX_ANNOTATIONS) { return "CAS contains more than " + MAX_ANNOTATIONS + " annotations - try tweaking the test parameters to reproduce" + " the isssue with a smaller CAS."; } StringBuilder sb = new StringBuilder(); aCas.select().forEach(fs -> { if (fs instanceof AnnotationFS) { AnnotationFS ann = (AnnotationFS) fs; sb.append(format("%s@[%3d-%3d] [%d] (parent type: %s)%n", ann.getType().getShortName(), ann.getBegin(), ann.getEnd(), System.identityHashCode(ann), ann.getCAS().getTypeSystem().getParent(ann.getType()))); } }); return sb.toString(); } private static boolean nonOverlappingSupported(RelativePosition aXRelToY) { return asList(COVERED_BY, FOLLOWING, PRECEDING).contains(aXRelToY); } private static int insertionPoint(List<Annotation> expected, Annotation fs) { return Collections.binarySearch(expected, fs, comparing(AnnotationFS::getBegin).thenComparing(AnnotationFS::getEnd, reverseOrder())); } private static List<Annotation> shifted(List<Annotation> expected, int shift, RelativePosition aXRelToY) { return startAtShifted(expected, null, shift, aXRelToY); } private static List<Annotation> startAtShifted(List<Annotation> expected, Annotation startFs, int shift, RelativePosition aXRelToY) { // NOTE: for the moment, we do not use the startFs, so the insertionPoint seek and handling // of the insertion point might be buggy! int start; int end; if (aXRelToY == PRECEDING) { // Shifting always shifts away from the reference FS - since with preceding the reference FS // is at the end of the selection region, we need to shift from the end here. start = 0; end = startFs != null ? insertionPoint(expected, startFs) : expected.size(); end -= shift; } else { start = startFs != null ? 
insertionPoint(expected, startFs) : 0; start += shift; end = expected.size(); } start = min(max(0, start), expected.size()); end = min(max(0, end), expected.size()); if (start > end) { return emptyList(); } return new ArrayList<>(expected.subList(start, end)); } private static List<Annotation> backwards(List<Annotation> expected) { List<Annotation> reverseExpected = new ArrayList<>(expected); Collections.reverse(reverseExpected); return reverseExpected; } private static List<Annotation> unambiguous(List<Annotation> expected) { List<Annotation> unambigousExpected = new ArrayList<>(); Annotation current = null; for (Annotation e : expected) { if (current == null || !overlapping(e, current)) { unambigousExpected.add(e); current = e; } } return unambigousExpected; } private static List<Annotation> limit(List<Annotation> expected, int limit, RelativePosition aXRelToY) { List<Annotation> limitedExpected = aXRelToY == PRECEDING ? expected.subList(max(0, expected.size() - limit), expected.size()) : expected.subList(0, min(limit, expected.size())); return limitedExpected; } @FunctionalInterface public interface RelativeAnnotationPredicate { boolean apply(CAS cas, Type type, Annotation x, Annotation y); } @FunctionalInterface public interface TypeByContextSelector { List<AnnotationFS> select(CAS aCas, Type aType, Annotation aContext); } @FunctionalInterface public interface TypeByContextSelectorAsSelection { SelectFSs<Annotation> select(CAS aCas, Type aType, Annotation aContext); } }
apache/tomcat80
36,757
test/javax/el/TestBeanELResolver.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package javax.el; import java.beans.FeatureDescriptor; import java.beans.PropertyDescriptor; import java.util.ArrayList; import java.util.Iterator; import org.junit.Assert; import org.junit.Test; import org.apache.jasper.el.ELContextImpl; public class TestBeanELResolver { private static final String METHOD01_NAME = "toString"; private static final String METHOD02_NAME = "<init>"; private static final String METHOD03_NAME = "nonExistingMethod"; private static final String BEAN_NAME = "test"; private static final String PROPERTY01_NAME = "valueA"; private static final String PROPERTY02_NAME = "valueB"; private static final String PROPERTY03_NAME = "name"; private static final String PROPERTY_VALUE = "test1"; @Test public void testBug53421() { ExpressionFactory factory = ExpressionFactory.newInstance(); ELContext context = new ELContextImpl(factory); Bean bean = new Bean(); ValueExpression varBean = factory.createValueExpression(bean, Bean.class); context.getVariableMapper().setVariable("bean", varBean); ValueExpression ve = factory.createValueExpression( context, "${bean.valueA}", String.class); Exception e = null; try { ve.getValue(context); } catch (PropertyNotFoundException pnfe) { e = pnfe; } 
Assert.assertTrue("Wrong exception type", e instanceof PropertyNotFoundException); String type = Bean.class.getName(); String msg = e.getMessage(); Assert.assertTrue("No reference to type [" + type + "] where property cannot be found in [" + msg + "]", msg.contains(type)); } /** * Tests that a null context results in an NPE as per EL Javadoc. */ @Test(expected = NullPointerException.class) public void testGetType01() { BeanELResolver resolver = new BeanELResolver(); resolver.getType(null, new Object(), new Object()); } /** * Tests that a valid property is not resolved if base is null. */ @Test public void testGetType02() { doNegativeTest(null, new Object(), MethodUnderTest.GET_TYPE, true); } /** * Tests that a valid property is resolved. */ @Test public void testGetType03() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Class<?> result = resolver.getType(context, new Bean(), PROPERTY01_NAME); Assert.assertEquals(String.class, result); Assert.assertTrue(context.isPropertyResolved()); } /** * Tests that an exception will be thrown when the property does not exist. */ @Test(expected = PropertyNotFoundException.class) public void testGetType04() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); resolver.getType(context, new Bean(), PROPERTY02_NAME); } /** * Tests that an exception will be thrown when a coercion cannot be * performed. */ @Test(expected = PropertyNotFoundException.class) public void testGetType05() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); resolver.getType(context, new Bean(), new Object()); } /** * Tests that a null context results in an NPE as per EL Javadoc. 
*/ @Test(expected = NullPointerException.class) public void testGetValue01() { BeanELResolver resolver = new BeanELResolver(); resolver.getValue(null, new Object(), new Object()); } /** * Tests that a valid property is not resolved if base is null. */ @Test public void testGetValue02() { doNegativeTest(null, new Object(), MethodUnderTest.GET_VALUE, true); } /** * Tests that a valid property is resolved. */ @Test public void testGetValue03() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.getValue(context, new TesterBean(BEAN_NAME), PROPERTY03_NAME); Assert.assertEquals(BEAN_NAME, result); Assert.assertTrue(context.isPropertyResolved()); } /** * Tests that an exception will be thrown when the property does not exist. */ @Test(expected = PropertyNotFoundException.class) public void testGetValue04() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); resolver.getValue(context, new Bean(), PROPERTY02_NAME); } /** * Tests that an exception will be thrown when a coercion cannot be * performed. */ @Test(expected = PropertyNotFoundException.class) public void testGetValue05() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); resolver.getValue(context, new Bean(), new Object()); } /** * Tests that an exception will be thrown when the property is not readable. */ @Test(expected = PropertyNotFoundException.class) public void testGetValue06() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); resolver.getValue(context, new Bean(), PROPERTY01_NAME); } /** * Tests that getter method throws exception which should be propagated. 
*/ @Test(expected = ELException.class) public void testGetValue07() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); resolver.getValue(context, new TesterBean(BEAN_NAME), PROPERTY01_NAME); } /** * Tests that a null context results in an NPE as per EL Javadoc. */ @Test(expected = NullPointerException.class) public void testSetValue01() { BeanELResolver resolver = new BeanELResolver(); resolver.setValue(null, new Object(), new Object(), new Object()); } /** * Tests that a valid property is not resolved if base is null. */ @Test public void testSetValue02() { doNegativeTest(null, new Object(), MethodUnderTest.SET_VALUE, true); } /** * Tests that an exception is thrown when readOnly is true. */ @Test(expected = PropertyNotWritableException.class) public void testSetValue03() { BeanELResolver resolver = new BeanELResolver(true); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); resolver.setValue(context, new Bean(), new Object(), new Object()); } /** * Tests that a valid property is resolved. */ @Test public void testSetValue04() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); TesterBean bean = new TesterBean(BEAN_NAME); resolver.setValue(context, bean, PROPERTY03_NAME, PROPERTY_VALUE); Assert.assertEquals(PROPERTY_VALUE, resolver.getValue(context, bean, PROPERTY03_NAME)); Assert.assertTrue(context.isPropertyResolved()); } /** * Tests that an exception will be thrown when a coercion cannot be * performed. */ @Test(expected = PropertyNotFoundException.class) public void testSetValue05() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); resolver.setValue(context, new Bean(), new Object(), PROPERTY_VALUE); } /** * Tests that an exception will be thrown when the property does not exist. 
*/ @Test(expected = PropertyNotFoundException.class) public void testSetValue06() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); resolver.setValue(context, new Bean(), PROPERTY02_NAME, PROPERTY_VALUE); } /** * Tests that an exception will be thrown when the property does not have * setter method. */ @Test(expected = PropertyNotWritableException.class) public void testSetValue07() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); resolver.setValue(context, new TesterBean(BEAN_NAME), PROPERTY01_NAME, PROPERTY_VALUE); } /** * Tests that a null context results in an NPE as per EL Javadoc. */ @Test(expected = NullPointerException.class) public void testIsReadOnly01() { BeanELResolver resolver = new BeanELResolver(); resolver.isReadOnly(null, new Object(), new Object()); } /** * Tests that the propertyResolved is false if base is null. */ @Test public void testIsReadOnly02() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); resolver.isReadOnly(context, null, new Object()); Assert.assertFalse(context.isPropertyResolved()); resolver = new BeanELResolver(true); resolver.isReadOnly(context, null, new Object()); Assert.assertFalse(context.isPropertyResolved()); } /** * Tests that if the BeanELResolver is constructed with readOnly the method * will return always true. 
*/ @Test public void testIsReadOnly03() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); boolean result = resolver.isReadOnly(context, new TesterBean(BEAN_NAME), PROPERTY03_NAME); Assert.assertFalse(result); Assert.assertTrue(context.isPropertyResolved()); resolver = new BeanELResolver(true); result = resolver.isReadOnly(context, new TesterBean(BEAN_NAME), PROPERTY03_NAME); Assert.assertTrue(result); Assert.assertTrue(context.isPropertyResolved()); } /** * Tests that an exception is thrown when a coercion cannot be performed. */ @Test(expected = PropertyNotFoundException.class) public void testIsReadOnly04() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); resolver.isReadOnly(context, new TesterBean(BEAN_NAME), Integer.valueOf(0)); } /** * Tests that an exception will be thrown when the property does not exist. */ @Test(expected = PropertyNotFoundException.class) public void testIsReadOnly05() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); resolver.isReadOnly(context, new Bean(), PROPERTY02_NAME); } /** * Tests that true will be returned when the property does not have setter * method. */ @Test public void testIsReadOnly06() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); boolean result = resolver.isReadOnly(context, new TesterBean(BEAN_NAME), PROPERTY01_NAME); Assert.assertTrue(result); Assert.assertTrue(context.isPropertyResolved()); } /** * Tests that a valid FeatureDescriptors are not returned if base is not * Map. 
*/ @Test public void testGetFeatureDescriptors01() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Iterator<FeatureDescriptor> result = resolver.getFeatureDescriptors(context, null); Assert.assertNull(result); } /** * Tests that a valid FeatureDescriptors are returned. */ @Test public void testGetFeatureDescriptors02() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Iterator<FeatureDescriptor> result = resolver.getFeatureDescriptors(context, new Bean()); while (result.hasNext()) { PropertyDescriptor featureDescriptor = (PropertyDescriptor) result.next(); Assert.assertEquals(featureDescriptor.getPropertyType(), featureDescriptor.getValue(ELResolver.TYPE)); Assert.assertEquals(Boolean.TRUE, featureDescriptor.getValue(ELResolver.RESOLVABLE_AT_DESIGN_TIME)); } } /** * Tests that a null context results in an NPE as per EL Javadoc. */ @Test(expected = NullPointerException.class) public void testInvoke01() { BeanELResolver resolver = new BeanELResolver(); resolver.invoke(null, new Object(), new Object(), new Class<?>[0], new Object[0]); } /** * Tests that a valid property is not resolved if base is null. */ @Test public void testInvoke02() { doNegativeTest(null, new Object(), MethodUnderTest.INVOKE, true); } /** * Tests a method invocation. */ @Test public void testInvoke03() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), METHOD01_NAME, new Class<?>[] {}, new Object[] {}); Assert.assertEquals(BEAN_NAME, result); Assert.assertTrue(context.isPropertyResolved()); } /** * Tests that the method name cannot be coerced to String. 
*/ @Test public void testInvoke04() { doNegativeTest(new Bean(), null, MethodUnderTest.INVOKE, true); } /** * Tests that a call to &lt;init&gt; as a method name will throw an exception. */ @Test(expected = MethodNotFoundException.class) public void testInvoke05() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); resolver.invoke(context, new TesterBean(BEAN_NAME), METHOD02_NAME, new Class<?>[] {}, new Object[] {}); } /** * Tests that a call to a non existing method will throw an exception. */ @Test(expected = MethodNotFoundException.class) public void testInvoke06() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); resolver.invoke(context, new TesterBean(BEAN_NAME), METHOD03_NAME, new Class<?>[] {}, new Object[] {}); } @Test public void testInvokeVarargsCoerce01() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] {}, new String[] {}); Assert.assertEquals(BEAN_NAME, result); } @Test public void testInvokeVarargsCoerce02() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", null, null); Assert.assertEquals(BEAN_NAME, result); } @Test public void testInvokeVarargsCoerce03() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", null, new String[] {}); Assert.assertEquals(BEAN_NAME, result); } @Test public void testInvokeVarargsCoerce04() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new 
StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] {}, null); Assert.assertEquals(BEAN_NAME, result); } @Test public void testInvokeVarargsCoerce05() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] { null }, new String[] { null }); Assert.assertEquals(BEAN_NAME, result); } @Test public void testInvokeVarargsCoerce06() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", null, new String[] { null }); Assert.assertEquals(BEAN_NAME, result); } @Test public void testInvokeVarargsCoerce07() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] { null }, null); Assert.assertEquals(BEAN_NAME, result); } @Test public void testInvokeVarargsCoerce08() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] { String.class }, new String[] { "true" }); Assert.assertEquals(BEAN_NAME, result); } @Test public void testInvokeVarargsCoerce09() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] { String.class, String.class }, new Object[] { "true", null }); Assert.assertEquals(BEAN_NAME, result); } @Test public void 
testInvokeVarargsCoerce10() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] { String.class, String[].class }, new Object[] { "true", null }); Assert.assertEquals(BEAN_NAME, result); } @Test public void testInvokeVarargsCoerce11() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] { String.class }, new Object[] { "10" }); Assert.assertEquals(BEAN_NAME, result); } @Test public void testInvokeVarargsCoerce12() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] { String[].class }, new String[] { "10" }); Assert.assertEquals(BEAN_NAME, result); } // Ambiguous because the Strings coerce to both Boolean and Integer hence // both varargs methods match. 
@Test(expected=MethodNotFoundException.class) public void testInvokeVarargsCoerce13() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] { String.class, String.class }, new String[] { "10", "11" }); Assert.assertEquals(BEAN_NAME, result); } @Test public void testInvokeVarargsCoerce14() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] { String.class, String.class }, new String[] { "true", null }); Assert.assertEquals(BEAN_NAME, result); } @Test(expected=MethodNotFoundException.class) public void testInvokeVarargsCoerce15() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] { String.class, String.class }, new Object[] { "true", new ArrayList<>() }); Assert.assertEquals(BEAN_NAME, result); } // Ambiguous because the Strings coerce to both Boolean and Integer hence // both varargs methods match. 
@Test(expected=MethodNotFoundException.class) public void testInvokeVarargsCoerce16() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] { String.class, String.class, String.class }, new Object[] { "10", "11", "12" }); Assert.assertEquals(BEAN_NAME, result); } @Test(expected=MethodNotFoundException.class) public void testInvokeVarargsCoerce17() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] { String.class, String.class }, new Object[] { "10", "11", "12" }); Assert.assertEquals(BEAN_NAME, result); } @Test(expected=MethodNotFoundException.class) public void testInvokeVarargsCoerce18() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] { String.class, String.class, String.class, String.class }, new Object[] { "10", "11", "12" }); Assert.assertEquals(BEAN_NAME, result); } @Test public void testInvokeVarargsCoerce19() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] { String.class, String.class, String.class, String.class }, new Object[] { "true", "10", "11", "12" }); Assert.assertEquals(BEAN_NAME, result); } @Test(expected=MethodNotFoundException.class) public void testInvokeVarargsCoerce20() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new 
TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] { String.class, String.class, String.class }, new Object[] { "true", "10", "11", "12" }); Assert.assertEquals(BEAN_NAME, result); } @Test(expected=MethodNotFoundException.class) public void testInvokeVarargsCoerce21() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] { String.class, String.class, String.class, String.class, String.class }, new Object[] { "true", "10", "11", "12" }); Assert.assertEquals(BEAN_NAME, result); } @Test public void testInvokeVarargs01() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] {}, new Object[] {}); Assert.assertEquals(BEAN_NAME, result); } @Test public void testInvokeVarargs02() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", null, null); Assert.assertEquals(BEAN_NAME, result); } @Test public void testInvokeVarargs03() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", null, new Object[] {}); Assert.assertEquals(BEAN_NAME, result); } @Test public void testInvokeVarargs04() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] {}, null); Assert.assertEquals(BEAN_NAME, result); } @Test public void testInvokeVarargs05() { BeanELResolver 
resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] { null }, new Object[] { null }); Assert.assertEquals(BEAN_NAME, result); } @Test public void testInvokeVarargs06() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", null, new Object[] { null }); Assert.assertEquals(BEAN_NAME, result); } @Test public void testInvokeVarargs07() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] { null }, null); Assert.assertEquals(BEAN_NAME, result); } @Test public void testInvokeVarargs08() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] { Boolean.class }, new Object[] { Boolean.TRUE }); Assert.assertEquals(BEAN_NAME, result); } @Test public void testInvokeVarargs09() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] { Boolean.class, Integer.class }, new Object[] { Boolean.TRUE, null }); Assert.assertEquals(BEAN_NAME, result); } @Test public void testInvokeVarargs10() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] { Boolean.class, Integer[].class }, new Object[] 
{ Boolean.TRUE, null }); Assert.assertEquals(BEAN_NAME, result); } @Test public void testInvokeVarargs11() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] { Integer.class }, new Object[] { Integer.valueOf(10) }); Assert.assertEquals(BEAN_NAME, result); } @Test public void testInvokeVarargs12() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] { Integer[].class }, new Object[] { Integer.valueOf(10) }); Assert.assertEquals(BEAN_NAME, result); } @Test public void testInvokeVarargs13() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] { Integer.class, Integer.class }, new Object[] { Integer.valueOf(10), Integer.valueOf(11) }); Assert.assertEquals(BEAN_NAME, result); } // Note: The coercion rules are that a null of any type can be coerced to a // null of *any* other type so this works. 
@Test public void testInvokeVarargs14() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] { Boolean.class, ArrayList.class }, new Object[] { Boolean.TRUE, null }); Assert.assertEquals(BEAN_NAME, result); } @Test(expected=MethodNotFoundException.class) public void testInvokeVarargs15() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] { Boolean.class, ArrayList.class }, new Object[] { Boolean.TRUE, new ArrayList<>() }); Assert.assertEquals(BEAN_NAME, result); } @Test public void testInvokeVarargs16() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] { Integer.class, Integer.class, Integer.class }, new Object[] { Integer.valueOf(10), Integer.valueOf(11), Integer.valueOf(12) }); Assert.assertEquals(BEAN_NAME, result); } @Test(expected=MethodNotFoundException.class) public void testInvokeVarargs17() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] { Integer.class, Integer.class }, new Object[] { Integer.valueOf(10), Integer.valueOf(11), Integer.valueOf(12) }); Assert.assertEquals(BEAN_NAME, result); } @Test(expected=MethodNotFoundException.class) public void testInvokeVarargs18() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new 
TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] { Integer.class, Integer.class, Integer.class, Integer.class }, new Object[] { Integer.valueOf(10), Integer.valueOf(11), Integer.valueOf(12) }); Assert.assertEquals(BEAN_NAME, result); } @Test public void testInvokeVarargs19() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] { Boolean.class, Integer.class, Integer.class, Integer.class }, new Object[] { Boolean.TRUE, Integer.valueOf(10), Integer.valueOf(11), Integer.valueOf(12) }); Assert.assertEquals(BEAN_NAME, result); } @Test(expected=MethodNotFoundException.class) public void testInvokeVarargs20() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] { Boolean.class, Integer.class, Integer.class }, new Object[] { Boolean.TRUE, Integer.valueOf(10), Integer.valueOf(11), Integer.valueOf(12) }); Assert.assertEquals(BEAN_NAME, result); } @Test(expected=MethodNotFoundException.class) public void testInvokeVarargs21() { BeanELResolver resolver = new BeanELResolver(); ELContext context = new StandardELContext(ELManager.getExpressionFactory()); Object result = resolver.invoke(context, new TesterBean(BEAN_NAME), "getNameVarargs", new Class<?>[] { Boolean.class, Integer.class, Integer.class, Integer.class, Integer.class }, new Object[] { Boolean.TRUE, Integer.valueOf(10), Integer.valueOf(11), Integer.valueOf(12) }); Assert.assertEquals(BEAN_NAME, result); } private static class Bean { @SuppressWarnings("unused") public void setValueA(String valueA) { // NOOP } } private void doNegativeTest(Object base, Object trigger, MethodUnderTest method, boolean checkResult) { BeanELResolver resolver = new BeanELResolver(); ELContext 
context = new StandardELContext(ELManager.getExpressionFactory()); Object result = null; switch (method) { case GET_VALUE: { result = resolver.getValue(context, base, trigger); break; } case SET_VALUE: { resolver.setValue(context, base, trigger, new Object()); break; } case GET_TYPE: { result = resolver.getType(context, base, trigger); break; } case INVOKE: { result = resolver.invoke(context, base, trigger, new Class<?>[0], new Object[0]); break; } default: { // Should never happen Assert.fail("Missing case for method"); } } if (checkResult) { Assert.assertNull(result); } Assert.assertFalse(context.isPropertyResolved()); } private static enum MethodUnderTest { GET_VALUE, SET_VALUE, GET_TYPE, INVOKE } }
googleapis/google-cloud-java
36,730
java-monitoring/proto-google-cloud-monitoring-v3/src/main/java/com/google/monitoring/v3/UpdateNotificationChannelRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/monitoring/v3/notification_service.proto // Protobuf Java Version: 3.25.8 package com.google.monitoring.v3; /** * * * <pre> * The `UpdateNotificationChannel` request. * </pre> * * Protobuf type {@code google.monitoring.v3.UpdateNotificationChannelRequest} */ public final class UpdateNotificationChannelRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.monitoring.v3.UpdateNotificationChannelRequest) UpdateNotificationChannelRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateNotificationChannelRequest.newBuilder() to construct. 
private UpdateNotificationChannelRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateNotificationChannelRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateNotificationChannelRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.monitoring.v3.NotificationServiceProto .internal_static_google_monitoring_v3_UpdateNotificationChannelRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.monitoring.v3.NotificationServiceProto .internal_static_google_monitoring_v3_UpdateNotificationChannelRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.monitoring.v3.UpdateNotificationChannelRequest.class, com.google.monitoring.v3.UpdateNotificationChannelRequest.Builder.class); } private int bitField0_; public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Optional. The fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Optional. The fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Optional. The fields to update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } public static final int NOTIFICATION_CHANNEL_FIELD_NUMBER = 3; private com.google.monitoring.v3.NotificationChannel notificationChannel_; /** * * * <pre> * Required. A description of the changes to be applied to the specified * notification channel. The description must provide a definition for * fields to be updated; the names of these fields should also be * included in the `update_mask`. * </pre> * * <code> * .google.monitoring.v3.NotificationChannel notification_channel = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the notificationChannel field is set. */ @java.lang.Override public boolean hasNotificationChannel() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. A description of the changes to be applied to the specified * notification channel. The description must provide a definition for * fields to be updated; the names of these fields should also be * included in the `update_mask`. * </pre> * * <code> * .google.monitoring.v3.NotificationChannel notification_channel = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The notificationChannel. */ @java.lang.Override public com.google.monitoring.v3.NotificationChannel getNotificationChannel() { return notificationChannel_ == null ? com.google.monitoring.v3.NotificationChannel.getDefaultInstance() : notificationChannel_; } /** * * * <pre> * Required. A description of the changes to be applied to the specified * notification channel. The description must provide a definition for * fields to be updated; the names of these fields should also be * included in the `update_mask`. 
* </pre> * * <code> * .google.monitoring.v3.NotificationChannel notification_channel = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.monitoring.v3.NotificationChannelOrBuilder getNotificationChannelOrBuilder() { return notificationChannel_ == null ? com.google.monitoring.v3.NotificationChannel.getDefaultInstance() : notificationChannel_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getUpdateMask()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(3, getNotificationChannel()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getNotificationChannel()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.monitoring.v3.UpdateNotificationChannelRequest)) { return super.equals(obj); } com.google.monitoring.v3.UpdateNotificationChannelRequest other = (com.google.monitoring.v3.UpdateNotificationChannelRequest) obj; if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (hasNotificationChannel() != 
other.hasNotificationChannel()) return false; if (hasNotificationChannel()) { if (!getNotificationChannel().equals(other.getNotificationChannel())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } if (hasNotificationChannel()) { hash = (37 * hash) + NOTIFICATION_CHANNEL_FIELD_NUMBER; hash = (53 * hash) + getNotificationChannel().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.monitoring.v3.UpdateNotificationChannelRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.monitoring.v3.UpdateNotificationChannelRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.monitoring.v3.UpdateNotificationChannelRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.monitoring.v3.UpdateNotificationChannelRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.monitoring.v3.UpdateNotificationChannelRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.monitoring.v3.UpdateNotificationChannelRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.monitoring.v3.UpdateNotificationChannelRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.monitoring.v3.UpdateNotificationChannelRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.monitoring.v3.UpdateNotificationChannelRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.monitoring.v3.UpdateNotificationChannelRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.monitoring.v3.UpdateNotificationChannelRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.monitoring.v3.UpdateNotificationChannelRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public 
static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.monitoring.v3.UpdateNotificationChannelRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The `UpdateNotificationChannel` request. * </pre> * * Protobuf type {@code google.monitoring.v3.UpdateNotificationChannelRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.monitoring.v3.UpdateNotificationChannelRequest) com.google.monitoring.v3.UpdateNotificationChannelRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.monitoring.v3.NotificationServiceProto .internal_static_google_monitoring_v3_UpdateNotificationChannelRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.monitoring.v3.NotificationServiceProto .internal_static_google_monitoring_v3_UpdateNotificationChannelRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.monitoring.v3.UpdateNotificationChannelRequest.class, com.google.monitoring.v3.UpdateNotificationChannelRequest.Builder.class); } // Construct using com.google.monitoring.v3.UpdateNotificationChannelRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if 
(com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getUpdateMaskFieldBuilder(); getNotificationChannelFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } notificationChannel_ = null; if (notificationChannelBuilder_ != null) { notificationChannelBuilder_.dispose(); notificationChannelBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.monitoring.v3.NotificationServiceProto .internal_static_google_monitoring_v3_UpdateNotificationChannelRequest_descriptor; } @java.lang.Override public com.google.monitoring.v3.UpdateNotificationChannelRequest getDefaultInstanceForType() { return com.google.monitoring.v3.UpdateNotificationChannelRequest.getDefaultInstance(); } @java.lang.Override public com.google.monitoring.v3.UpdateNotificationChannelRequest build() { com.google.monitoring.v3.UpdateNotificationChannelRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.monitoring.v3.UpdateNotificationChannelRequest buildPartial() { com.google.monitoring.v3.UpdateNotificationChannelRequest result = new com.google.monitoring.v3.UpdateNotificationChannelRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.monitoring.v3.UpdateNotificationChannelRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.notificationChannel_ = notificationChannelBuilder_ == null ? 
notificationChannel_ : notificationChannelBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.monitoring.v3.UpdateNotificationChannelRequest) { return mergeFrom((com.google.monitoring.v3.UpdateNotificationChannelRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.monitoring.v3.UpdateNotificationChannelRequest other) { if (other == com.google.monitoring.v3.UpdateNotificationChannelRequest.getDefaultInstance()) return this; if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } if (other.hasNotificationChannel()) { mergeNotificationChannel(other.getNotificationChannel()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 18 case 26: { input.readMessage( getNotificationChannelFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Optional. The fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Optional. The fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Optional. The fields to update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Optional. The fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Optional. The fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Optional. The fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000001); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Optional. The fields to update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000001; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Optional. The fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Optional. The fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } private com.google.monitoring.v3.NotificationChannel notificationChannel_; private com.google.protobuf.SingleFieldBuilderV3< com.google.monitoring.v3.NotificationChannel, com.google.monitoring.v3.NotificationChannel.Builder, com.google.monitoring.v3.NotificationChannelOrBuilder> notificationChannelBuilder_; /** * * * <pre> * Required. A description of the changes to be applied to the specified * notification channel. The description must provide a definition for * fields to be updated; the names of these fields should also be * included in the `update_mask`. 
* </pre> * * <code> * .google.monitoring.v3.NotificationChannel notification_channel = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the notificationChannel field is set. */ public boolean hasNotificationChannel() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. A description of the changes to be applied to the specified * notification channel. The description must provide a definition for * fields to be updated; the names of these fields should also be * included in the `update_mask`. * </pre> * * <code> * .google.monitoring.v3.NotificationChannel notification_channel = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The notificationChannel. */ public com.google.monitoring.v3.NotificationChannel getNotificationChannel() { if (notificationChannelBuilder_ == null) { return notificationChannel_ == null ? com.google.monitoring.v3.NotificationChannel.getDefaultInstance() : notificationChannel_; } else { return notificationChannelBuilder_.getMessage(); } } /** * * * <pre> * Required. A description of the changes to be applied to the specified * notification channel. The description must provide a definition for * fields to be updated; the names of these fields should also be * included in the `update_mask`. * </pre> * * <code> * .google.monitoring.v3.NotificationChannel notification_channel = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setNotificationChannel(com.google.monitoring.v3.NotificationChannel value) { if (notificationChannelBuilder_ == null) { if (value == null) { throw new NullPointerException(); } notificationChannel_ = value; } else { notificationChannelBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. A description of the changes to be applied to the specified * notification channel. 
The description must provide a definition for * fields to be updated; the names of these fields should also be * included in the `update_mask`. * </pre> * * <code> * .google.monitoring.v3.NotificationChannel notification_channel = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setNotificationChannel( com.google.monitoring.v3.NotificationChannel.Builder builderForValue) { if (notificationChannelBuilder_ == null) { notificationChannel_ = builderForValue.build(); } else { notificationChannelBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. A description of the changes to be applied to the specified * notification channel. The description must provide a definition for * fields to be updated; the names of these fields should also be * included in the `update_mask`. * </pre> * * <code> * .google.monitoring.v3.NotificationChannel notification_channel = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeNotificationChannel(com.google.monitoring.v3.NotificationChannel value) { if (notificationChannelBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && notificationChannel_ != null && notificationChannel_ != com.google.monitoring.v3.NotificationChannel.getDefaultInstance()) { getNotificationChannelBuilder().mergeFrom(value); } else { notificationChannel_ = value; } } else { notificationChannelBuilder_.mergeFrom(value); } if (notificationChannel_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. A description of the changes to be applied to the specified * notification channel. The description must provide a definition for * fields to be updated; the names of these fields should also be * included in the `update_mask`. 
* </pre> * * <code> * .google.monitoring.v3.NotificationChannel notification_channel = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearNotificationChannel() { bitField0_ = (bitField0_ & ~0x00000002); notificationChannel_ = null; if (notificationChannelBuilder_ != null) { notificationChannelBuilder_.dispose(); notificationChannelBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. A description of the changes to be applied to the specified * notification channel. The description must provide a definition for * fields to be updated; the names of these fields should also be * included in the `update_mask`. * </pre> * * <code> * .google.monitoring.v3.NotificationChannel notification_channel = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.monitoring.v3.NotificationChannel.Builder getNotificationChannelBuilder() { bitField0_ |= 0x00000002; onChanged(); return getNotificationChannelFieldBuilder().getBuilder(); } /** * * * <pre> * Required. A description of the changes to be applied to the specified * notification channel. The description must provide a definition for * fields to be updated; the names of these fields should also be * included in the `update_mask`. * </pre> * * <code> * .google.monitoring.v3.NotificationChannel notification_channel = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.monitoring.v3.NotificationChannelOrBuilder getNotificationChannelOrBuilder() { if (notificationChannelBuilder_ != null) { return notificationChannelBuilder_.getMessageOrBuilder(); } else { return notificationChannel_ == null ? com.google.monitoring.v3.NotificationChannel.getDefaultInstance() : notificationChannel_; } } /** * * * <pre> * Required. A description of the changes to be applied to the specified * notification channel. The description must provide a definition for * fields to be updated; the names of these fields should also be * included in the `update_mask`. 
* </pre> * * <code> * .google.monitoring.v3.NotificationChannel notification_channel = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.monitoring.v3.NotificationChannel, com.google.monitoring.v3.NotificationChannel.Builder, com.google.monitoring.v3.NotificationChannelOrBuilder> getNotificationChannelFieldBuilder() { if (notificationChannelBuilder_ == null) { notificationChannelBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.monitoring.v3.NotificationChannel, com.google.monitoring.v3.NotificationChannel.Builder, com.google.monitoring.v3.NotificationChannelOrBuilder>( getNotificationChannel(), getParentForChildren(), isClean()); notificationChannel_ = null; } return notificationChannelBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.monitoring.v3.UpdateNotificationChannelRequest) } // @@protoc_insertion_point(class_scope:google.monitoring.v3.UpdateNotificationChannelRequest) private static final com.google.monitoring.v3.UpdateNotificationChannelRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.monitoring.v3.UpdateNotificationChannelRequest(); } public static com.google.monitoring.v3.UpdateNotificationChannelRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateNotificationChannelRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateNotificationChannelRequest>() { @java.lang.Override public UpdateNotificationChannelRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateNotificationChannelRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateNotificationChannelRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.monitoring.v3.UpdateNotificationChannelRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
oracle/coherence
36,667
prj/coherence-core/src/main/java/com/tangosol/io/ByteArrayWriteBuffer.java
/* * Copyright (c) 2000, 2024, Oracle and/or its affiliates. * * Licensed under the Universal Permissive License v 1.0 as shown at * https://oss.oracle.com/licenses/upl. */ package com.tangosol.io; import com.tangosol.util.Binary; import com.tangosol.util.ExternalizableHelper; import java.io.EOFException; import java.io.IOException; import java.io.UTFDataFormatException; import java.nio.ByteBuffer; import java.nio.CharBuffer; /** * ByteArrayWriteBuffer is an implementation of WriteBuffer on a byte array. * It is designed to support both fixed length buffers and resizable buffers. * <p> * This implementation is not intended to be thread safe. * * @author cp 2005.03.24 */ public class ByteArrayWriteBuffer extends AbstractWriteBuffer { // ----- constructors --------------------------------------------------- /** * Default constructor; intended only for use by subclasses. * <p> * Note that this default constructor leaves the buffer in an invalid * state. */ protected ByteArrayWriteBuffer() { } /** * Construct a ByteArrayWriteBuffer on a byte array. * * @param ab a byte array * * @exception NullPointerException if <tt>ab</tt> is <tt>null</tt> */ public ByteArrayWriteBuffer(byte[] ab) { m_ab = ab; m_cbMax = ab.length; } /** * Construct an ByteArrayWriteBuffer with a certain initial capacity. * * @param cbCap initial capacity * * @exception IllegalArgumentException if <tt>cbCap</tt> is negative */ public ByteArrayWriteBuffer(int cbCap) { this(cbCap, Integer.MAX_VALUE); } /** * Construct an ByteArrayWriteBuffer with a certain initial capacity and * a certain maximum capacity. 
* * @param cbCap initial capacity * @param cbMax maximum capacity * * @exception IllegalArgumentException if <tt>cbCap</tt> or <tt>cbMax</tt> * is negative, or if <tt>cbCap</tt> is greater than * <tt>cbMax</tt> */ public ByteArrayWriteBuffer(int cbCap, int cbMax) { if (cbCap < 0 || cbMax < 0 || cbCap > cbMax) { throw new IllegalArgumentException("cap=" + cbCap + "; max=" + cbMax); } m_ab = createBytes(cbCap); m_cbMax = cbMax; } /** * Create a new ByteArrayWriteBuffer based on a region of an already * existing WriteBuffer. * * @param buffer the source buffer * @param i the offset within the source buffer * @param cb the number of bytes to copy */ public ByteArrayWriteBuffer(WriteBuffer buffer, int i, int cb) { m_cbMax = cb - i; m_ab = createBytes(m_cbMax); write(0, buffer.getUnsafeReadBuffer(), i, cb); } // ----- buffer write operations ---------------------------------------- /** * {@inheritDoc} */ public final void write(int ofDest, byte b) { checkBounds(ofDest, 1); m_ab[ofDest] = b; updateLength(ofDest + 1); } /** * {@inheritDoc} */ public final void write(int ofDest, byte[] abSrc, int ofSrc, int cbSrc) { checkBounds(ofDest, cbSrc); // it's necessary to call this (even if cbSrc==0) in order to // correctly validate the arguments System.arraycopy(abSrc, ofSrc, m_ab, ofDest, cbSrc); if (cbSrc > 0) { updateLength(ofDest + cbSrc); } } /** * {@inheritDoc} */ public final void write(int ofDest, ReadBuffer bufSrc, int ofSrc, int cbSrc) { checkBounds(ofDest, cbSrc); // it's necessary to call this (even if cbSrc==0) in order to // correctly validate the arguments bufSrc.copyBytes(ofSrc, ofSrc + cbSrc, m_ab, ofDest); updateLength(ofDest + cbSrc); } /** * {@inheritDoc} */ public final void write(int ofDest, InputStreaming stream, int cbSrc) throws IOException { // see if it is a known implementation that we can optimize for if (stream instanceof ReadBuffer.BufferInput) { copyBufferInputPortion(ofDest, (ReadBuffer.BufferInput) stream, cbSrc); return; } // read the stream 
straight into the underlying byte[] checkBounds(ofDest, cbSrc); int cbRead = 0; try { while (cbRead < cbSrc) { int cbActual = stream.read(m_ab, ofDest + cbRead, cbSrc - cbRead); if (cbActual < 0) { throw new EOFException("instructed to copy " + cbSrc + " bytes, but only " + cbRead + " were available"); } else { cbRead += cbActual; } } } finally { if (cbRead > 0) { updateLength(ofDest + cbRead); } } } // ----- buffer maintenance ---------------------------------------------- /** * {@inheritDoc} */ public final int length() { return m_cb; } /** * Reconfigure the length of the buffer. The length must not be longer than * the available capacity. * * @param cb the new length of the buffer */ public final void setLength(int cb) { assert cb <= m_cbMax; updateLength(cb); } /** * {@inheritDoc} */ public final void retain(int of, int cb) { if (of < 0 || cb < 0 || of + cb > m_cb) { throw new IndexOutOfBoundsException("of=" + of + ", cb=" + cb + ", length()=" + m_cb); } if (of > 0 && cb > 0) { byte[] ab = m_ab; System.arraycopy(ab, of, ab, 0, cb); } m_cb = cb; m_bufUnsafe = null; } /** * {@inheritDoc} */ public final int getCapacity() { return m_ab.length; } /** * {@inheritDoc} */ public final int getMaximumCapacity() { return m_cbMax; } // ----- obtaining different "write views" to the buffer ---------------- /** * {@inheritDoc} */ public final BufferOutput getBufferOutput(int of) { return new ByteArrayBufferOutput(of); } // ----- accessing the buffered data ------------------------------------ /** * {@inheritDoc} */ public final ReadBuffer getUnsafeReadBuffer() { ByteArrayReadBuffer buf = m_bufUnsafe; if (buf == null) { m_bufUnsafe = buf = new ByteArrayReadBuffer(m_ab, 0, m_cb, false, isByteArrayPrivate(), false); } else { buf.updateLength(m_cb); } return buf; } /** * {@inheritDoc} * * For efficiency purposes, it is possible to obtain the internal byte * array that the ByteArrayWriteBuffer is using by calling {@link * #getRawByteArray()}; if the internal byte array is 
private (i.e. if it * cannot be exposed to the caller), then the result will be the same as * would be returned by toByteArray(). */ public final byte[] toByteArray() { int cb = m_cb; if (cb == 0) { return NO_BYTES; } byte[] ab = new byte[cb]; System.arraycopy(m_ab, 0, ab, 0, cb); return ab; } /** * {@inheritDoc} */ public Binary toBinary() { int cb = m_cb; if (cb == 0) { return NO_BINARY; } return new Binary(m_ab, 0, cb); } // ----- accessors ------------------------------------------------------ /** * Determine if the underlying byte[] should be treated as private data. * * @return true iff the underlying data should not ever be exposed by * this object */ public boolean isByteArrayPrivate() { return m_fPrivate; } /** * Make sure that the underlying byte[] will be treated as private data. */ public final void makeByteArrayPrivate() { m_fPrivate = true; m_bufUnsafe = null; } /** * Obtain the byte array that this WriteBuffer uses. If the underlying * byte array is private, then this method will always return a copy of * the portion of the byte array that this WriteBuffer represents as if * the called had called {@link #toByteArray()}. * * @return the byte array that this WriteBuffer uses */ public final byte[] getRawByteArray() { return isByteArrayPrivate() ? 
toByteArray() : m_ab; } // ----- internal ------------------------------------------------------- /** * {@inheritDoc} */ protected final int copyStream(int ofDest, InputStreaming stream, int cbMax) throws IOException { // see if it is a known implementation that we can optimize for if (stream instanceof ReadBuffer.BufferInput) { return copyBufferInputRemainder(ofDest, (ReadBuffer.BufferInput) stream, cbMax); } int ofOrig = ofDest; int cbRemain = cbMax; while (true) { // while there is additional capacity in the buffer, read byte[] ab = m_ab; int cbCap = Math.min(ab.length, ofDest + cbRemain); while (ofDest < cbCap) { int cbActual; try { cbActual = stream.read(ab, ofDest, cbCap - ofDest); } catch (EOFException e) { cbActual = -1; } if (cbActual < 0) { updateLength(ofDest); return ofDest - ofOrig; } else { ofDest += cbActual; cbRemain -= cbActual; } } if (cbRemain > 0) { // when out of room, grow updateLength(ofDest); grow(ofDest); } // once we reach max cap, just read one byte to prove overflow if (ofDest >= m_ab.length || cbRemain == 0) { if (stream.read() < 0) { // filled the capacity perfectly; no more data to read updateLength(ofDest); return ofDest - ofOrig; } else { throw new IOException("Overflow: write buffer limited to " + cbMax + " bytes, but input stream is not exhausted"); } } } } /** * Create a byte array of the specified size. The main reason to make this * into a separate method is a fact the native OOME comes without any stack trace. * * @param cb the specified size * * @return a byte array */ protected static byte[] createBytes(int cb) { try { return new byte[cb]; } catch (OutOfMemoryError e) { if (cb == Integer.MAX_VALUE) { throw new UnsupportedOperationException( "buffer has reached its max capacity of 2GB"); } throw new OutOfMemoryError( "Failed to allocate a byte array of the requested size: " + cb); } } /** * Validate the ranges for the passed bounds and make sure that the * underlying array is big enough to handle them. 
* * @param of the offset that data is about to be written to * @param cb the length of the data that is about to be written */ protected void checkBounds(int of, int cb) { int cbTotal = of + cb; if (of < 0 || cb < 0 || cbTotal > m_cbMax || cbTotal < 0) { boundsException(of, cb); } if (cbTotal > m_ab.length) { grow(cbTotal); } } /** * Raise an exception for the offset/length being out of bounds. This * code was moved out of checkBounds in order to encourage more aggressive * in-lining by the HotSpot JVM. * * @param of the current offset * @param cb the current length * * @throws IndexOutOfBoundsException always */ private void boundsException(int of, int cb) throws IndexOutOfBoundsException { if ((long) of + (long) cb > Integer.MAX_VALUE) { throw new UnsupportedOperationException( "buffer has reached its max capacity of 2GB"); } throw new IndexOutOfBoundsException("of=" + of + ", cb=" + cb + ", max=" + m_cbMax); } /** * Grow the underlying byte array to at least the specified size. * * @param cbCap the required or requested capacity */ protected final void grow(int cbCap) { // desired growth is 100% for "small" buffers and 50% for "huge" // minimum growth is 1KB byte[] abOld = m_ab; int cbOld = abOld.length; int cbAdd = Math.max(1024, cbOld > 0x100000 ? 
cbOld >>> 1 : cbOld); int cbNew = (int) Math.min(m_cbMax, Math.max(((long) cbCap) + 1024, ((long) cbOld) + cbAdd)); if (cbNew > cbOld) { // ensure that we don't allocate more than the configured maximum ExternalizableHelper.validateBufferSize(cbNew); byte[] abNew; while (true) { try { abNew = createBytes(cbNew); break; } catch (UnsupportedOperationException | OutOfMemoryError e) { if (cbCap == Integer.MAX_VALUE) { throw e; } // our pre-sizing was too aggressive; try to back down a bit cbNew -= (cbNew - cbCap) / 2; if (cbNew - 1 <= cbCap) { // create a new OOME to throw since the original one // most likely doesn't have any stack trace info throw e; } } } int cbData = m_cb; if (cbData > 0) { System.arraycopy(abOld, 0, abNew, 0, cbData); } m_ab = abNew; m_bufUnsafe = null; } } /** * Update the length if the passed length is greater than the current * buffer length. * * @param cb the count of the last byte written (or the index of the * next byte to write) */ protected final void updateLength(int cb) { if (cb > m_cb) { m_cb = cb; } } // ----- inner class: ByteArrayBufferOutput ---------------------------------- /** * ByteArrayBufferOutput is an implementation of BufferOutput optimized * for writing to the buffer's underlying byte array. * * @author cp 2005.03.25 */ public final class ByteArrayBufferOutput extends AbstractBufferOutput { // ----- constructors ------------------------------------------- /** * Construct an ByteArrayBufferOutput that will begin writing at the * start of the containing WriteBuffer. */ public ByteArrayBufferOutput() { } /** * Construct an ByteArrayBufferOutput that will begin writing at the * specified offset within the containing WriteBuffer. 
* * @param of the offset at which to begin writing */ public ByteArrayBufferOutput(int of) { super(of); } // ----- DataOutput methods ------------------------------------- /** * {@inheritDoc} */ public void writeShort(int n) throws IOException { int of = m_ofWrite; checkBounds(of, 2); byte[] ab = m_ab; ab[of] = (byte) (n >>> 8); ab[of + 1] = (byte) (n); moveOffset(2); } /** * {@inheritDoc} */ public void writeInt(int n) throws IOException { int of = m_ofWrite; checkBounds(of, 4); byte[] ab = m_ab; ab[of ] = (byte) (n >>> 24); ab[of + 1] = (byte) (n >>> 16); ab[of + 2] = (byte) (n >>> 8); ab[of + 3] = (byte) (n); moveOffset(4); } /** * {@inheritDoc} */ public void writeLong(long l) throws IOException { int of = m_ofWrite; checkBounds(of, 8); byte[] ab = m_ab; // hi word int n = (int) (l >>> 32); ab[of ] = (byte) (n >>> 24); ab[of + 1] = (byte) (n >>> 16); ab[of + 2] = (byte) (n >>> 8); ab[of + 3] = (byte) (n); // lo word n = (int) l; ab[of + 4] = (byte) (n >>> 24); ab[of + 5] = (byte) (n >>> 16); ab[of + 6] = (byte) (n >>> 8); ab[of + 7] = (byte) (n); moveOffset(8); } /** * {@inheritDoc} */ @SuppressWarnings("deprecation") public void writeBytes(String s) throws IOException { int of = m_ofWrite; int cb = s.length(); checkBounds(of, cb); s.getBytes(0, cb, m_ab, of); // deprecated, but avoids encoding moveOffset(cb); } /** * {@inheritDoc} */ public void writeChars(String s) throws IOException { int cch = s.length(); int of = m_ofWrite; int cb = cch << 1; checkBounds(of, cb); byte[] ab = m_ab; for (int ofch = 0; ofch < cch; ++ofch) { char ch = s.charAt(ofch); ab[of] = (byte) (ch >>> 8); ab[of + 1] = (byte) (ch); of += 2; } moveOffset(cb); } /** * {@inheritDoc} */ public void writeUTF(String s) throws IOException { if (s.isEmpty()) { // 0-length UTF (Java UTF has a 2-byte length indicator) writeShort(0); } else { // estimate the length (in bytes) of the resulting UTF (see writeSafeUTF below for details) int cbEstimate = s.length(); int ofHeader = m_ofWrite; // Java UTF 
binary format has only 2 bytes for length; fail fast if we know we'll be attempting // to write more than 64K of data if (cbEstimate > 0xFFFF) { throw new UTFDataFormatException("UTF binary length=" + cbEstimate + ", max=65535"); } // now that we have a rough idea of the UTF length, make sure the buffer // is big enough; in theory, although unlikely, each character could use 3 bytes, // so we need to assume the worst-case scenario (4-byte characters are counted as two // characters, high and low surrogate, by String.length(), so that case is accounted for) int ofValue = ofHeader + 2; checkBounds(ofValue, cbEstimate * 3); // write the UTF directly into the buffer int cb = formatModifiedUTF(s, m_ab, ofValue); // Java UTF binary format has only 2 bytes for length if (cb > 0xFFFF) { throw new UTFDataFormatException("UTF binary length=" + cbEstimate + ", max=65535"); } // write the UTF header (the length in bytes) m_ab[ofHeader] = (byte) (cb >>> 8); m_ab[ofHeader + 1] = (byte) (cb); moveOffset(2 + cb); } } public void writeUTF(CharBuffer bufCh) throws IOException { if (bufCh.isEmpty()) { // 0-length UTF (Java UTF has a 2-byte length indicator) writeShort(0); } else { // estimate the length (in bytes) of the resulting UTF (see writeSafeUTF below for details) int cbEstimate = bufCh.length(); int ofHeader = m_ofWrite; // Java UTF binary format has only 2 bytes for length; fail fast if we know we'll be attempting // to write more than 64K of data if (cbEstimate > 0xFFFF) { throw new UTFDataFormatException("UTF binary length=" + cbEstimate + ", max=65535"); } // now that we have a rough idea of the UTF length, make sure the buffer // is big enough; in theory, although unlikely, each character could use 3 bytes, // so we need to assume the worst-case scenario (4-byte characters are counted as two // characters, high and low surrogate, by String.length(), so that case is accounted for) int ofValue = ofHeader + 2; checkBounds(ofValue, cbEstimate * 3); // write the UTF directly 
into the buffer int cb = formatModifiedUTF(bufCh, m_ab, ofValue); // Java UTF binary format has only 2 bytes for length if (cb > 0xFFFF) { throw new UTFDataFormatException("UTF binary length=" + cbEstimate + ", max=65535"); } else { // write the UTF header (the length in bytes) m_ab[ofHeader] = (byte) (cb >>> 8); m_ab[ofHeader + 1] = (byte) (cb); } moveOffset(2 + cb); } } // ----- BufferOutput methods ----------------------------------- /** * {@inheritDoc} */ public ByteBuffer getByteBuffer(int cb) { int of = m_ofWrite; checkBounds(of, cb); moveOffset(cb); return ByteBuffer.wrap(m_ab, of, cb); } /** * {@inheritDoc} */ public void writeSafeUTF(String s) throws IOException { if (s == null) { writePackedInt(-1); } else { if (s.isEmpty()) { writePackedInt(0); } else if (isLatin1(s)) { byte[] abString = value(s); if (isAscii(abString)) { // String uses ASCII encoding (characters 0x00-0x7F), which are represented using // single byte UTF-8 format, so we can write value array directly writePackedInt(abString.length); write(abString); } else { // we need to convert all negative bytes (0x80-0xFF) into 2-byte UTF-8 format int cb = abString.length + countNegatives(abString); writePackedInt(cb); int of = m_ofWrite; checkBounds(of, cb); byte[] ab = m_ab; for (byte b : abString) { if (b < 0) { ab[of++] = (byte) (0xC0 | ((b & 0xFF) >> 6)); ab[of++] = (byte) (0x80 | (b & 0x3F)); } else { ab[of++] = b; } } moveOffset(cb); } } else { // The tricky part here is that we need to write encoded string length in bytes *before* we actually // encode string, but we won't know the actual length in bytes *until* we encode it. // // We are going to assume that: // // 1. In most cases we are dealing with Latin1 encoded strings, with no multi-byte characters // 2. 
Even when we are not, the number of bytes written will never be *less* then the string length // // Because of this, we will write the actual string length into the buffer, which is the correct value // to write if string is a Latin1 string, or close enough to the final result that we can simply replace // it with the correct value at the end int cbEstimate = s.length(); int ofHeader = m_ofWrite; // write the UTF header (the estimated length in bytes) writePackedInt(cbEstimate); // now that we have a rough idea of the UTF length, make sure the buffer // is big enough; in theory, although unlikely, each character could use 3 bytes, // so we need to assume the worst-case scenario (4-byte characters are counted as two // characters, high and low surrogate, by String.length(), so that case is accounted for) int ofValue = m_ofWrite; checkBounds(ofValue, cbEstimate * 3); // write the UTF directly into the buffer int cb = formatUTF(s, m_ab, ofValue); if (cb != cbEstimate) { // unfortunately, we wrote more bytes than we assumed would be the case, // so we need to fix the header int cbHeader = ofValue - ofHeader; int cbActual = cb < 0x40 ? 1 : (39 - Integer.numberOfLeadingZeros(cb)) / 7; if (cbHeader != cbActual) { // bad news: we need more bytes for the header, so we need to move the value :-( checkBounds(ofHeader, cbActual + cb); int ofValueNew = ofValue + cbActual - cbHeader; System.arraycopy(m_ab, ofValue, m_ab, ofValueNew, cb); ofValue = ofValueNew; } // now we can simply overwrite the header with the actual number of bytes m_ofWrite = ofHeader; writePackedInt(cb); m_ofWrite = ofValue; } moveOffset(cb); } } } public void writeSafeUTF(CharBuffer bufCh) throws IOException { if (bufCh.isEmpty()) { writePackedInt(0); } else { // The tricky part here is that we need to write encoded string length in bytes *before* we actually // encode string, but we won't know the actual length in bytes *until* we encode it. // // We are going to assume that: // // 1. 
In most cases we are dealing with Latin1 encoded strings, with no multi-byte characters // 2. Even when we are not, the number of bytes written will never be *less* then the string length // // Because of this, we will write the actual string length into the buffer, which is the correct value // to write if string is a Latin1 string, or close enough to the final result that we can simply replace // it with the correct value at the end int cbEstimate = bufCh.length(); int ofHeader = m_ofWrite; // write the UTF header (the estimated length in bytes) writePackedInt(cbEstimate); // now that we have a rough idea of the UTF length, make sure the buffer // is big enough; in theory, although unlikely, each character could use 4 bytes, // so we need to assume the worst-case scenario int ofValue = m_ofWrite; checkBounds(ofValue, cbEstimate << 2); // write the UTF directly into the buffer int cb = formatUTF(bufCh, m_ab, ofValue); if (cb != cbEstimate) { // unfortunately, we wrote more bytes than we assumed would be the case, // so we need to fix the header int cbHeader = ofValue - ofHeader; int cbActual = cb < 0x40 ? 1 : (39 - Integer.numberOfLeadingZeros(cb)) / 7; if (cbHeader != cbActual) { // bad news: we need more bytes for the header, so we need to move the value :-( checkBounds(ofHeader, cbActual + cb); int ofValueNew = ofValue + cbActual - cbHeader; System.arraycopy(m_ab, ofValue, m_ab, ofValueNew, cb); ofValue = ofValueNew; } // now we can simply overwrite the header with the actual number of bytes m_ofWrite = ofHeader; writePackedInt(cb); m_ofWrite = ofValue; } moveOffset(cb); } } /** * {@inheritDoc} */ public void writePackedInt(int n) throws IOException { // first byte contains sign bit (bit 7 set if neg) int b = 0; if (n < 0) { b = 0x40; n = ~n; } // now that the value is positive, check its magnitude and see // how many bytes it will take to store int ofb = m_ofWrite; checkBounds(ofb, n < 0x40 ? 
1 : (39 - Integer.numberOfLeadingZeros(n)) / 7); byte[] ab = m_ab; int ofOrig = ofb; // first byte contains only 6 data bits b |= (byte) (n & 0x3F); n >>>= 6; while (n != 0) { b |= 0x80; // bit 8 is a continuation bit ab[ofb++] = (byte) b; b = (n & 0x7F); n >>>= 7; } ab[ofb++] = (byte) b; moveOffset(ofb - ofOrig); } /** * {@inheritDoc} */ public void writePackedLong(long l) throws IOException { // first byte contains sign bit (bit 7 set if neg) int b = 0; if (l < 0) { b = 0x40; l = ~l; } // now that the value is positive, check its magnitude and see // how many bytes it will take to store int ofb = m_ofWrite; checkBounds(ofb, l < 0x40 ? 1 : (71 - Long.numberOfLeadingZeros(l)) / 7); byte[] ab = m_ab; int ofOrig = ofb; // first byte contains only 6 data bits b |= (byte) (((int) l) & 0x3F); l >>>= 6; while (l != 0) { b |= 0x80; // bit 8 is a continuation bit ab[ofb++] = (byte) b; b = (((int) l) & 0x7F); l >>>= 7; } ab[ofb++] = (byte) b; moveOffset(ofb - ofOrig); } // ----- internal ----------------------------------------------- /** * Move the offset within the stream forward. * * @param cb the number of bytes to advance the offset */ private void moveOffset(int cb) { int of = m_ofWrite + cb; m_ofWrite = of; updateLength(of); } } // ----- inner class: Allocator ----------------------------------------- /** * Allocator is a WriteBufferPool implementation which allocates a new * ByteArrayWriteBuffer on each request to the pool, and does not retain * the returned buffer. Essentially it is dummy pool which acts as an * allocator. */ public static class Allocator implements MultiBufferWriteBuffer.WriteBufferPool { // ----- constructors ------------------------------------------- /** * Construct an Allocator for ByteArrayWriteBuffers of a given size. 
* * @param cb the capacity of the ByteArrayWriteBuffer to be allocated */ public Allocator(int cb) { m_cb = cb; } // ----- WriteBufferPool interface ------------------------------ /** * {@inheritDoc} */ public int getMaximumCapacity() { return m_cb; } /** * Allocate a new ByteArrayWriteBuffer. * * @param cbPreviousTotal <i>unused</i> * * @return a new ByteArrayWriteBuffer with this Allocator's * {@link #getMaximumCapacity() capacity} */ public WriteBuffer allocate(int cbPreviousTotal) { return new ByteArrayWriteBuffer(createBytes(m_cb)); } /** * Release the supplied buffer into the pool. * <p> * This method is a no op. * * @param buffer <i>unused</i> */ public void release(WriteBuffer buffer) { // no op } // ----- data members ------------------------------------------- /** * The capacity of the ByteArrayWriteBuffer instances to allocate. */ protected int m_cb; } // ----- data members --------------------------------------------------- /** * The byte array that holds the binary data. */ protected byte[] m_ab; /** * Number of bytes in the byte array that have been written by this * WriteBuffer. This is the length. */ protected int m_cb; /** * Number of bytes that the byte array can be grown to. This is the * maximum capacity. */ protected int m_cbMax; /** * Cached ReadBuffer to quickly provide an answer to * {@link #getUnsafeReadBuffer()}. */ protected transient ByteArrayReadBuffer m_bufUnsafe; /** * Specifies whether or not the byte array is treated as private data. */ private boolean m_fPrivate; }
google/cel-java
36,905
checker/src/main/java/dev/cel/checker/ExprChecker.java
// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package dev.cel.checker; import static com.google.common.base.Preconditions.checkNotNull; import dev.cel.expr.CheckedExpr; import dev.cel.expr.ParsedExpr; import dev.cel.expr.Type; import com.google.auto.value.AutoValue; import com.google.common.base.Joiner; import com.google.common.base.Optional; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; import com.google.common.collect.Maps; import com.google.errorprone.annotations.CheckReturnValue; import dev.cel.common.CelAbstractSyntaxTree; import dev.cel.common.CelContainer; import dev.cel.common.CelFunctionDecl; import dev.cel.common.CelOverloadDecl; import dev.cel.common.CelProtoAbstractSyntaxTree; import dev.cel.common.annotations.Internal; import dev.cel.common.ast.CelConstant; import dev.cel.common.ast.CelExpr; import dev.cel.common.ast.CelReference; import dev.cel.common.types.CelKind; import dev.cel.common.types.CelProtoTypes; import dev.cel.common.types.CelType; import dev.cel.common.types.CelTypes; import dev.cel.common.types.ListType; import dev.cel.common.types.MapType; import dev.cel.common.types.OptionalType; import dev.cel.common.types.SimpleType; import dev.cel.common.types.TypeType; import dev.cel.parser.Operator; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Map; import org.jspecify.annotations.Nullable; /** * The expression type checker. 
* * <p>CEL-Java library internals. Do not use. * * @deprecated Please migrate to CEL-Java Fluent APIs instead. See {@code CelCompilerFactory}. */ @Internal @Deprecated public final class ExprChecker { /** * Deprecated type-check API. * * @deprecated Do not use. CEL-Java users should leverage the Fluent APIs instead. See {@code * CelCompilerFactory}. */ @CheckReturnValue @Deprecated public static CheckedExpr check(Env env, String inContainer, ParsedExpr parsedExpr) { return typecheck(env, inContainer, parsedExpr, Optional.absent()); } /** * Deprecated type-check API. * * @deprecated Do not use. CEL-Java users should leverage the Fluent APIs instead. See {@code * CelCompilerFactory}. */ @CheckReturnValue @Deprecated public static CheckedExpr typecheck( Env env, String inContainer, ParsedExpr parsedExpr, Optional<Type> expectedResultType) { Optional<CelType> type = expectedResultType.isPresent() ? Optional.of(CelProtoTypes.typeToCelType(expectedResultType.get())) : Optional.absent(); CelAbstractSyntaxTree ast = typecheck( env, CelContainer.ofName(inContainer), CelProtoAbstractSyntaxTree.fromParsedExpr(parsedExpr).getAst(), type); if (ast.isChecked()) { return CelProtoAbstractSyntaxTree.fromCelAst(ast).toCheckedExpr(); } return CheckedExpr.newBuilder() .setExpr(parsedExpr.getExpr()) .setSourceInfo(parsedExpr.getSourceInfo()) .build(); } /** * Type-checks the parsed expression within the given environment and returns a checked * expression. If an expected result type was given, then it verifies that that type matches the * actual result type. Conditions for type checking and the constructed {@link CheckedExpr} are * described in checked.proto. * * <p>CEL Library Internals. Do not use. CEL-Java users should use the Fluent APIs instead. 
*/ @CheckReturnValue @Internal public static CelAbstractSyntaxTree typecheck( Env env, CelContainer container, CelAbstractSyntaxTree ast, Optional<CelType> expectedResultType) { env.resetTypeAndRefMaps(); final ExprChecker checker = new ExprChecker( env, container, ast.getSource().getPositionsMap(), new InferenceContext(), env.enableCompileTimeOverloadResolution(), env.enableHomogeneousLiterals(), env.enableNamespacedDeclarations()); CelExpr expr = checker.visit(ast.getExpr()); if (expectedResultType.isPresent()) { checker.assertType(expr, expectedResultType.get()); } // Walk over the final type map substituting any type parameters either by their bound value or // by DYN. Map<Long, CelType> typeMap = Maps.transformValues(env.getTypeMap(), checker.inferenceContext::finalize); return CelAbstractSyntaxTree.newCheckedAst(expr, ast.getSource(), env.getRefMap(), typeMap); } private final Env env; private final TypeProvider typeProvider; private final CelContainer container; private final Map<Long, Integer> positionMap; private final InferenceContext inferenceContext; private final boolean compileTimeOverloadResolution; private final boolean homogeneousLiterals; private final boolean namespacedDeclarations; private ExprChecker( Env env, CelContainer container, Map<Long, Integer> positionMap, InferenceContext inferenceContext, boolean compileTimeOverloadResolution, boolean homogeneousLiterals, boolean namespacedDeclarations) { this.env = checkNotNull(env); this.typeProvider = env.getTypeProvider(); this.positionMap = checkNotNull(positionMap); this.container = checkNotNull(container); this.inferenceContext = checkNotNull(inferenceContext); this.compileTimeOverloadResolution = compileTimeOverloadResolution; this.homogeneousLiterals = homogeneousLiterals; this.namespacedDeclarations = namespacedDeclarations; } /** Visit the {@code expr} value, routing to overloads based on the kind of expression. 
*/ @CheckReturnValue public CelExpr visit(CelExpr expr) { switch (expr.exprKind().getKind()) { case CONSTANT: return visit(expr, expr.constant()); case IDENT: return visit(expr, expr.ident()); case SELECT: return visit(expr, expr.select()); case CALL: return visit(expr, expr.call()); case LIST: return visit(expr, expr.list()); case STRUCT: return visit(expr, expr.struct()); case MAP: return visit(expr, expr.map()); case COMPREHENSION: return visit(expr, expr.comprehension()); default: throw new IllegalArgumentException("unexpected expr kind"); } } @CheckReturnValue private CelExpr visit(CelExpr expr, CelConstant constant) { switch (constant.getKind()) { case INT64_VALUE: env.setType(expr, SimpleType.INT); break; case UINT64_VALUE: env.setType(expr, SimpleType.UINT); break; case STRING_VALUE: env.setType(expr, SimpleType.STRING); break; case BYTES_VALUE: env.setType(expr, SimpleType.BYTES); break; case BOOLEAN_VALUE: env.setType(expr, SimpleType.BOOL); break; case NULL_VALUE: env.setType(expr, SimpleType.NULL_TYPE); break; case DOUBLE_VALUE: env.setType(expr, SimpleType.DOUBLE); break; case TIMESTAMP_VALUE: env.setType(expr, SimpleType.TIMESTAMP); break; case DURATION_VALUE: env.setType(expr, SimpleType.DURATION); break; default: throw new IllegalArgumentException("unexpected constant case: " + constant.getKind()); } return expr; } @CheckReturnValue private CelExpr visit(CelExpr expr, CelExpr.CelIdent ident) { CelIdentDecl decl = env.lookupIdent(expr.id(), getPosition(expr), container, ident.name()); checkNotNull(decl); if (decl.equals(Env.ERROR_IDENT_DECL)) { // error reported env.setType(expr, SimpleType.ERROR); env.setRef(expr, makeReference(decl)); return expr; } if (!decl.name().equals(ident.name())) { // Overwrite the identifier with its fully qualified name. 
expr = replaceIdentSubtree(expr, decl.name()); } env.setType(expr, decl.type()); env.setRef(expr, makeReference(decl)); return expr; } @CheckReturnValue private CelExpr visit(CelExpr expr, CelExpr.CelSelect select) { // Before traversing down the tree, try to interpret as qualified name. String qname = asQualifiedName(expr); if (qname != null) { CelIdentDecl decl = env.tryLookupCelIdent(container, qname); if (decl != null) { if (select.testOnly()) { env.reportError(expr.id(), getPosition(expr), "expression does not select a field"); env.setType(expr, SimpleType.BOOL); } else { if (namespacedDeclarations) { // Rewrite the node to be a variable reference to the resolved fully-qualified // variable name. expr = replaceIdentSubtree(expr, decl.name()); } env.setType(expr, decl.type()); env.setRef(expr, makeReference(decl)); } return expr; } } // Interpret as field selection, first traversing down the operand. CelExpr visitedOperand = visit(select.operand()); if (namespacedDeclarations && !select.operand().equals(visitedOperand)) { // Subtree has been rewritten. Replace the operand. expr = replaceSelectOperandSubtree(expr, visitedOperand); } CelType resultType = visitSelectField(expr, visitedOperand, select.field(), false); if (select.testOnly()) { resultType = SimpleType.BOOL; } env.setType(expr, resultType); return expr; } @CheckReturnValue private CelExpr visit(CelExpr expr, CelExpr.CelCall call) { String functionName = call.function(); if (Operator.OPTIONAL_SELECT.getFunction().equals(functionName)) { return visitOptionalCall(expr, call); } // Traverse arguments. ImmutableList<CelExpr> argsList = call.args(); for (int i = 0; i < argsList.size(); i++) { CelExpr arg = argsList.get(i); CelExpr visitedArg = visit(arg); if (namespacedDeclarations && !visitedArg.equals(arg)) { // Argument has been overwritten. 
expr = replaceCallArgumentSubtree(expr, visitedArg, i); } } int position = getPosition(expr); OverloadResolution resolution; if (!call.target().isPresent()) { // Regular static call with simple name. CelFunctionDecl decl = env.lookupFunction(expr.id(), position, container, call.function()); resolution = resolveOverload(expr.id(), position, decl, null, call.args()); if (!decl.name().equals(call.function())) { if (namespacedDeclarations) { // Overwrite the function name with its fully qualified resolved name. expr = replaceCallSubtree(expr, decl.name()); } } } else { // Check whether the target is actually a qualified name for a static function. String qualifiedName = asQualifiedName(call.target().get()); CelFunctionDecl decl = env.tryLookupCelFunction(container, qualifiedName + "." + call.function()); if (decl != null) { resolution = resolveOverload(expr.id(), position, decl, null, call.args()); if (namespacedDeclarations) { // The function name is namespaced and so preserving the target operand would // be an inaccurate representation of the desired evaluation behavior. // Overwrite with fully-qualified resolved function name sans receiver target. expr = replaceCallSubtree(expr, decl.name()); } } else { // Regular instance call. CelExpr target = call.target().get(); CelExpr visitedTargetExpr = visit(target); if (namespacedDeclarations && !visitedTargetExpr.equals(target)) { // Visiting target contained a namespaced function. Rewrite the call expression here by // setting the target to the new subtree. expr = replaceCallSubtree(expr, visitedTargetExpr); } resolution = resolveOverload( expr.id(), position, env.lookupFunction(expr.id(), getPosition(expr), container, call.function()), target, call.args()); } } env.setType(expr, resolution.type()); env.setRef(expr, resolution.reference()); return expr; } @CheckReturnValue private CelExpr visit(CelExpr expr, CelExpr.CelStruct struct) { // Determine the type of the message. 
CelType messageType = SimpleType.ERROR;
    CelIdentDecl decl =
        env.lookupIdent(expr.id(), getPosition(expr), container, struct.messageName());
    // If ident resolution qualified the message name (e.g. via the container), rewrite the
    // struct expression to carry the fully-qualified name so downstream passes agree on it.
    if (!struct.messageName().equals(decl.name())) {
      expr =
          expr.toBuilder()
              .setStruct(struct.toBuilder().setMessageName(decl.name()).build())
              .build();
    }
    env.setRef(expr, CelReference.newBuilder().setName(decl.name()).build());
    CelType type = decl.type();
    if (type.kind() != CelKind.ERROR) {
      if (type.kind() != CelKind.TYPE) { // expected type of types
        env.reportError(expr.id(), getPosition(expr), "'%s' is not a type", CelTypes.format(type));
      } else {
        messageType = ((TypeType) type).type();
        if (messageType.kind() != CelKind.STRUCT) {
          env.reportError(
              expr.id(),
              getPosition(expr),
              "'%s' is not a message type",
              CelTypes.format(messageType));
          messageType = SimpleType.ERROR;
        }
      }
    }
    // When the type is well-known mark the expression with the CEL type rather than the proto type.
    if (Env.isWellKnownType(messageType)) {
      env.setType(expr, Env.getWellKnownType(messageType));
    } else {
      env.setType(expr, messageType);
    }

    // Check the field initializers: each entry value must be assignable to the declared
    // field type (unwrapping optional entries first).
    ImmutableList<CelExpr.CelStruct.Entry> entriesList = struct.entries();
    for (int i = 0; i < entriesList.size(); i++) {
      CelExpr.CelStruct.Entry entry = entriesList.get(i);
      CelExpr visitedValueExpr = visit(entry.value());
      if (namespacedDeclarations && !visitedValueExpr.equals(entry.value())) {
        // Subtree has been rewritten. Replace the struct value.
        expr = replaceStructEntryValueSubtree(expr, visitedValueExpr, i);
      }
      CelType fieldType =
          getFieldType(entry.id(), getPosition(entry), messageType, entry.fieldKey()).celType();
      CelType valueType = env.getType(visitedValueExpr);
      if (entry.optionalEntry()) {
        if (valueType instanceof OptionalType) {
          valueType = unwrapOptional(valueType);
        } else {
          // A non-optional value supplied to an optional entry: report by asserting it against
          // the optional-wrapped type (this produces the expected/found error message).
          assertIsAssignable(
              visitedValueExpr.id(),
              getPosition(visitedValueExpr),
              valueType,
              OptionalType.create(valueType));
        }
      }
      if (!inferenceContext.isAssignable(fieldType, valueType)) {
        env.reportError(
            expr.id(),
            getPosition(entry),
            "expected type of field '%s' is '%s' but provided type is '%s'",
            entry.fieldKey(),
            CelTypes.format(fieldType),
            CelTypes.format(valueType));
      }
    }
    return expr;
  }

  /**
   * Type-checks a map literal. Key and value types are joined across all entries; optional
   * entries contribute their unwrapped value type. An empty map gets fresh type variables for
   * key and value. Returns the (possibly rewritten) expression.
   */
  @CheckReturnValue
  private CelExpr visit(CelExpr expr, CelExpr.CelMap map) {
    CelType mapKeyType = null;
    CelType mapValueType = null;
    ImmutableList<CelExpr.CelMap.Entry> entriesList = map.entries();
    for (int i = 0; i < entriesList.size(); i++) {
      CelExpr.CelMap.Entry entry = entriesList.get(i);
      CelExpr visitedMapKeyExpr = visit(entry.key());
      if (namespacedDeclarations && !visitedMapKeyExpr.equals(entry.key())) {
        // Subtree has been rewritten. Replace the map key.
        expr = replaceMapEntryKeySubtree(expr, visitedMapKeyExpr, i);
      }
      mapKeyType =
          joinTypes(
              visitedMapKeyExpr.id(),
              getPosition(visitedMapKeyExpr),
              mapKeyType,
              env.getType(visitedMapKeyExpr));
      CelExpr visitedValueExpr = visit(entry.value());
      if (namespacedDeclarations && !visitedValueExpr.equals(entry.value())) {
        // Subtree has been rewritten. Replace the map value.
        expr = replaceMapEntryValueSubtree(expr, visitedValueExpr, i);
      }
      CelType valueType = env.getType(visitedValueExpr);
      if (entry.optionalEntry()) {
        if (valueType instanceof OptionalType) {
          valueType = unwrapOptional(valueType);
        } else {
          assertIsAssignable(
              visitedValueExpr.id(),
              getPosition(visitedValueExpr),
              valueType,
              OptionalType.create(valueType));
        }
      }
      mapValueType =
          joinTypes(visitedValueExpr.id(), getPosition(visitedValueExpr), mapValueType, valueType);
    }
    if (mapKeyType == null) {
      // If the map is empty, assign free type variables to key and value type.
      mapKeyType = inferenceContext.newTypeVar("key");
      mapValueType = inferenceContext.newTypeVar("value");
    }
    env.setType(expr, MapType.create(mapKeyType, mapValueType));
    return expr;
  }

  /**
   * Type-checks a list literal. Element types are joined; elements at optional indices
   * contribute their unwrapped type. An empty list gets a fresh type variable for the element
   * type. Returns the (possibly rewritten) expression.
   */
  @CheckReturnValue
  private CelExpr visit(CelExpr expr, CelExpr.CelList list) {
    CelType elemsType = null;
    ImmutableList<CelExpr> elementsList = list.elements();
    HashSet<Integer> optionalIndices = new HashSet<>(list.optionalIndices());
    for (int i = 0; i < elementsList.size(); i++) {
      CelExpr visitedElem = visit(elementsList.get(i));
      if (namespacedDeclarations && !visitedElem.equals(elementsList.get(i))) {
        // Subtree has been rewritten. Replace the list element
        expr = replaceListElementSubtree(expr, visitedElem, i);
      }
      CelType elemType = env.getType(visitedElem);
      if (optionalIndices.contains(i)) {
        if (elemType instanceof OptionalType) {
          elemType = unwrapOptional(elemType);
        } else {
          assertIsAssignable(
              visitedElem.id(), getPosition(visitedElem), elemType, OptionalType.create(elemType));
        }
      }
      elemsType = joinTypes(visitedElem.id(), getPosition(visitedElem), elemsType, elemType);
    }
    if (elemsType == null) {
      // If the list is empty, assign free type var to elem type.
      elemsType = inferenceContext.newTypeVar("elem");
    }
    env.setType(expr, ListType.create(elemsType));
    return expr;
  }

  /**
   * Type-checks a comprehension. The iteration variable's type is derived from the range type
   * (list element, map key, or DYN), the accumulator is declared in an outer scope and the
   * iteration variable(s) in an inner scope, and the overall type is the specialized type of
   * the result expression. Returns the (possibly rewritten) expression.
   */
  @CheckReturnValue
  private CelExpr visit(CelExpr expr, CelExpr.CelComprehension compre) {
    CelExpr visitedRange = visit(compre.iterRange());
    CelExpr visitedInit = visit(compre.accuInit());
    CelType accuType = env.getType(visitedInit);
    CelType rangeType = inferenceContext.specialize(env.getType(visitedRange));
    CelType varType;
    CelType varType2 = null;
    switch (rangeType.kind()) {
      case LIST:
        varType = ((ListType) rangeType).elemType();
        // Two-variable form over a list binds (index, element).
        if (!Strings.isNullOrEmpty(compre.iterVar2())) {
          varType2 = varType;
          varType = SimpleType.INT;
        }
        break;
      case MAP:
        // Ranges over the keys.
        varType = ((MapType) rangeType).keyType();
        if (!Strings.isNullOrEmpty(compre.iterVar2())) {
          varType2 = ((MapType) rangeType).valueType();
        }
        break;
      case DYN:
      case ERROR:
        varType = SimpleType.DYN;
        varType2 = SimpleType.DYN;
        break;
      case TYPE_PARAM:
        // Mark the range as DYN to avoid its free variable being associated with the wrong type
        // based on an earlier or later use. The isAssignable call will ensure that type
        // substitutions are updated for the type param.
        inferenceContext.isAssignable(SimpleType.DYN, rangeType);
        // Mark the variable type as DYN.
        varType = SimpleType.DYN;
        varType2 = SimpleType.DYN;
        break;
      default:
        env.reportError(
            expr.id(),
            getPosition(visitedRange),
            "expression of type '%s' cannot be range of a comprehension "
                + "(must be list, map, or dynamic)",
            CelTypes.format(rangeType));
        varType = SimpleType.DYN;
        varType2 = SimpleType.DYN;
        break;
    }

    // Declare accumulation variable on outer scope.
    env.enterScope();
    env.add(CelIdentDecl.newIdentDeclaration(compre.accuVar(), accuType));
    // Declare iteration variable on inner scope.
    env.enterScope();
    env.add(CelIdentDecl.newIdentDeclaration(compre.iterVar(), varType));
    if (!Strings.isNullOrEmpty(compre.iterVar2())) {
      env.add(CelIdentDecl.newIdentDeclaration(compre.iterVar2(), varType2));
    }
    CelExpr condition = visit(compre.loopCondition());
    assertType(condition, SimpleType.BOOL);
    CelExpr visitedStep = visit(compre.loopStep());
    assertType(visitedStep, accuType);
    // Forget iteration variable, as result expression must only depend on accu.
    env.exitScope();
    CelExpr visitedResult = visit(compre.result());
    env.exitScope();
    if (namespacedDeclarations) {
      if (!visitedRange.equals(compre.iterRange())) {
        expr = replaceComprehensionRangeSubtree(expr, visitedRange);
      }
      if (!visitedInit.equals(compre.accuInit())) {
        expr = replaceComprehensionAccuInitSubtree(expr, visitedInit);
      }
      if (!visitedStep.equals(compre.loopStep())) {
        expr = replaceComprehensionStepSubtree(expr, visitedStep);
      }
      if (!visitedResult.equals(compre.result())) {
        expr = replaceComprehensionResultSubtree(expr, visitedResult);
      }
    }
    env.setType(expr, inferenceContext.specialize(env.getType(visitedResult)));
    return expr;
  }

  /** Builds a reference to {@code decl}, carrying its constant value when one is present. */
  private CelReference makeReference(CelIdentDecl decl) {
    CelReference.Builder ref = CelReference.newBuilder().setName(decl.name());
    if (decl.constant().isPresent()) {
      ref.setValue(decl.constant().get());
    }
    return ref.build();
  }

  /**
   * Resolves a call against the overloads of {@code function}. Collects every overload whose
   * parameter types are assignable from the argument types; the first match fixes the result
   * type, and further non-identical matches widen it to DYN (or raise an error when
   * compile-time overload resolution is enabled). Reports an error and returns an ERROR-typed
   * resolution when nothing matches.
   */
  private OverloadResolution resolveOverload(
      long callExprId,
      int position,
      @Nullable CelFunctionDecl function,
      @Nullable CelExpr target,
      List<CelExpr> args) {
    if (function == null || function.equals(Env.ERROR_FUNCTION_DECL)) {
      // Error reported, just return error value.
      return OverloadResolution.of(CelReference.newBuilder().build(), SimpleType.ERROR);
    }

    // For instance calls, the target's type participates as the leading argument type.
    List<CelType> argTypes = new ArrayList<>();
    if (target != null) {
      argTypes.add(env.getType(target));
    }
    for (CelExpr arg : args) {
      argTypes.add(env.getType(arg));
    }

    CelType resultType = null;
    // For most common result type.
    String firstCandString = null;
    CelReference.Builder refBuilder = CelReference.newBuilder();
    List<String> excludedCands = new ArrayList<>();
    String expectedString =
        TypeFormatter.formatFunction(inferenceContext.specialize(argTypes), target != null);
    for (CelOverloadDecl overload : function.overloads()) {
      boolean isInstance = overload.isInstanceFunction();
      if ((target == null && isInstance) || (target != null && !isInstance)) {
        // not a compatible call style.
        continue;
      }
      CelType overloadType =
          CelTypes.createFunctionType(overload.resultType(), overload.parameterTypes());
      if (!overload.typeParameterNames().isEmpty()) {
        // Instantiate overload's type with fresh type variables.
        overloadType = inferenceContext.newInstance(overload.typeParameterNames(), overloadType);
      }
      // parameters() places the result type at index 0; the remainder are the argument types.
      ImmutableList<CelType> candArgTypes =
          overloadType.parameters().subList(1, overloadType.parameters().size());
      String candString =
          TypeFormatter.formatFunction(inferenceContext.specialize(candArgTypes), target != null);
      if (inferenceContext.isAssignable(argTypes, candArgTypes)) {
        // Collect overload id.
        refBuilder.addOverloadIds(overload.overloadId());
        if (resultType == null) {
          // First matching overload, determines result type.
          resultType = inferenceContext.specialize(overloadType.parameters().get(0));
          firstCandString = candString;
        } else {
          // More than one matching overload in non-strict mode, narrow result type to DYN unless
          // the overload type matches the previous result type.
          CelType fnResultType = inferenceContext.specialize(overloadType).parameters().get(0);
          if (!Types.isDyn(resultType) && !resultType.equals(fnResultType)) {
            // TODO: Consider joining result types of successful candidates when the
            // types are assignable, but not the same. Note, type assignability checks here seem to
            // mutate the type substitutions list in unexpected ways that result in errant results.
            resultType = SimpleType.DYN;
          }
          if (compileTimeOverloadResolution) {
            // In compile-time overload resolution mode report this situation as an error.
            env.reportError(
                callExprId,
                position,
                "found more than one matching overload for '%s' applied to '%s': %s and also %s",
                function.name(),
                expectedString,
                firstCandString,
                candString);
          }
        }
      } else {
        excludedCands.add(candString);
      }
    }

    if (resultType == null) {
      env.reportError(
          callExprId,
          position,
          "found no matching overload for '%s' applied to '%s'%s",
          function.name(),
          expectedString,
          excludedCands.isEmpty()
              ? ""
              : " (candidates: " + Joiner.on(',').join(excludedCands) + ")");
      resultType = SimpleType.ERROR;
    }
    return OverloadResolution.of(refBuilder.build(), resultType);
  }

  /**
   * Determines the result type of selecting {@code field} from {@code operand}. Handles struct
   * field lookup, map value types, type-parameter operands (forced to DYN), and wraps the result
   * in an optional type when the selection is optional or the operand was optional.
   */
  // Return value from visit is not needed as the subtree is not rewritten here.
  @SuppressWarnings("CheckReturnValue")
  private CelType visitSelectField(
      CelExpr expr, CelExpr operand, String field, boolean isOptional) {
    CelType operandType = inferenceContext.specialize(env.getType(operand));
    CelType resultType = SimpleType.ERROR;
    if (operandType instanceof OptionalType) {
      isOptional = true;
      operandType = unwrapOptional(operandType);
    }

    if (!Types.isDynOrError(operandType)) {
      if (operandType.kind() == CelKind.STRUCT) {
        TypeProvider.FieldType fieldType =
            getFieldType(expr.id(), getPosition(expr), operandType, field);
        // Type of the field
        resultType = fieldType.celType();
      } else if (operandType.kind() == CelKind.MAP) {
        resultType = ((MapType) operandType).valueType();
      } else if (operandType.kind() == CelKind.TYPE_PARAM) {
        // Mark the operand as type DYN to avoid cases where the free type variable might take on
        // an incorrect type if used in multiple locations.
        //
        // The assignability test will update the type substitutions for the type parameter with the
        // type DYN. This ensures that the operand type is appropriately flagged as DYN even though
        // it has already been assigned a TypeParam type from an earlier call flow.
        inferenceContext.isAssignable(SimpleType.DYN, operandType);
        // Mark the result type as DYN since no meaningful type inferences can be made from this
        // field selection.
        resultType = SimpleType.DYN;
      } else {
        env.reportError(
            expr.id(),
            getPosition(expr),
            "type '%s' does not support field selection",
            CelTypes.format(operandType));
      }
    } else {
      resultType = SimpleType.DYN;
    }

    // If the target type was optional coming in, then the result must be optional going out.
    if (isOptional) {
      resultType = OptionalType.create(resultType);
    }
    return resultType;
  }

  /**
   * Type-checks an optional field selection call ({@code operand.?field}). The field argument
   * must be a string constant; otherwise an error is reported and the expression is returned
   * unchanged.
   */
  private CelExpr visitOptionalCall(CelExpr expr, CelExpr.CelCall call) {
    CelExpr operand = call.args().get(0);
    CelExpr field = call.args().get(1);
    if (!field.exprKind().getKind().equals(CelExpr.ExprKind.Kind.CONSTANT)
        || field.constant().getKind() != CelConstant.Kind.STRING_VALUE) {
      env.reportError(expr.id(), getPosition(field), "unsupported optional field selection");
      return expr;
    }
    CelExpr visitedOperand = visit(operand);
    if (namespacedDeclarations && !operand.equals(visitedOperand)) {
      // Subtree has been rewritten. Replace the operand.
      expr = replaceCallArgumentSubtree(expr, visitedOperand, 0);
    }
    CelType resultType = visitSelectField(expr, operand, field.constant().stringValue(), true);
    env.setType(expr, resultType);
    env.setRef(expr, CelReference.newBuilder().addOverloadIds("select_optional_field").build());
    return expr;
  }

  /**
   * Attempt to interpret an expression as a qualified name. This traverses select and getIdent
   * expression and returns the name they constitute, or null if the expression cannot be
   * interpreted like this.
   */
  private @Nullable String asQualifiedName(CelExpr expr) {
    switch (expr.exprKind().getKind()) {
      case IDENT:
        return expr.ident().name();
      case SELECT:
        String qname = asQualifiedName(expr.select().operand());
        if (qname != null) {
          return qname + "." + expr.select().field();
        }
        return null;
      default:
        return null;
    }
  }

  /** Returns the field type given a type instance and field name. */
  private TypeProvider.FieldType getFieldType(
      long exprId, int position, CelType type, String fieldName) {
    String typeName = type.name();
    if (typeProvider.lookupCelType(typeName).isPresent()) {
      TypeProvider.FieldType fieldType = typeProvider.lookupFieldType(type, fieldName);
      if (fieldType != null) {
        return fieldType;
      }
      // Fall back to extension fields before reporting the field as undefined.
      TypeProvider.ExtensionFieldType extensionFieldType =
          typeProvider.lookupExtensionType(fieldName);
      if (extensionFieldType != null) {
        return extensionFieldType.fieldType();
      }
      env.reportError(exprId, position, "undefined field '%s'", fieldName);
    } else {
      // Proto message was added as a variable to the environment but the descriptor was not
      // provided
      String errorMessage =
          String.format("Message type resolution failure while referencing field '%s'.", fieldName);
      if (type.kind().equals(CelKind.STRUCT)) {
        errorMessage +=
            String.format(
                " Ensure that the descriptor for type '%s' was added to the environment", typeName);
      }
      // NOTE(review): errorMessage is already fully formatted above, yet fieldName/typeName are
      // passed again as format args; if fieldName ever contained a '%', re-formatting could
      // misbehave — confirm how reportError treats a message with no remaining placeholders.
      env.reportError(exprId, position, errorMessage, fieldName, typeName);
    }
    return ERROR;
  }

  /** Checks compatibility of joined types, and returns the most general common type. */
  private CelType joinTypes(long exprId, int position, CelType previousType, CelType type) {
    if (previousType == null) {
      return type;
    }
    if (homogeneousLiterals) {
      // Homogeneous literals: every element must be assignable to the established type.
      assertIsAssignable(exprId, position, type, previousType);
    } else if (!inferenceContext.isAssignable(previousType, type)) {
      // Heterogeneous literals degrade to DYN instead of erroring.
      return SimpleType.DYN;
    }
    return Types.mostGeneral(previousType, type);
  }

  /** Reports an "expected type X but found Y" error when {@code actual} is not assignable. */
  private void assertIsAssignable(long exprId, int position, CelType actual, CelType expected) {
    if (!inferenceContext.isAssignable(expected, actual)) {
      env.reportError(
          exprId,
          position,
          "expected type '%s' but found '%s'",
          CelTypes.format(expected),
          CelTypes.format(actual));
    }
  }

  /** Returns the element type wrapped by an optional type. */
  private CelType unwrapOptional(CelType type) {
    return type.parameters().get(0);
  }

  /** Asserts that the checked type of {@code expr} is assignable to {@code type}. */
  private void assertType(CelExpr expr, CelType type) {
    assertIsAssignable(expr.id(), getPosition(expr), env.getType(expr), type);
  }

  /** Returns the source position recorded for the expression, or 0 when unknown. */
  private int getPosition(CelExpr expr) {
    Integer pos = positionMap.get(expr.id());
    return pos == null ? 0 : pos;
  }

  /** Returns the source position recorded for the struct entry, or 0 when unknown. */
  private int getPosition(CelExpr.CelStruct.Entry entry) {
    Integer pos = positionMap.get(entry.id());
    return pos == null ? 0 : pos;
  }

  /** Helper object for holding an overload resolution result. */
  @AutoValue
  protected abstract static class OverloadResolution {

    /** The {@code Reference} to the declaration name and overload id. */
    public abstract CelReference reference();

    /** The {@code Type} of the result associated with the overload. */
    public abstract CelType type();

    /** Construct a new {@code OverloadResolution} from a {@code reference} and {@code type}. */
    public static OverloadResolution of(CelReference reference, CelType type) {
      return new AutoValue_ExprChecker_OverloadResolution(reference, type);
    }
  }

  /** Helper object to represent a {@link TypeProvider.FieldType} lookup failure. */
  private static final TypeProvider.FieldType ERROR = TypeProvider.FieldType.of(Types.ERROR);

  // The replace*Subtree helpers below rebuild a single child of an expression node, returning a
  // new immutable CelExpr; used when namespaced declaration resolution rewrites a subtree.

  private static CelExpr replaceIdentSubtree(CelExpr expr, String name) {
    CelExpr.CelIdent newIdent = CelExpr.CelIdent.newBuilder().setName(name).build();
    return expr.toBuilder().setIdent(newIdent).build();
  }

  private static CelExpr replaceSelectOperandSubtree(CelExpr expr, CelExpr operand) {
    CelExpr.CelSelect newSelect = expr.select().toBuilder().setOperand(operand).build();
    return expr.toBuilder().setSelect(newSelect).build();
  }

  private static CelExpr replaceCallArgumentSubtree(CelExpr expr, CelExpr newArg, int index) {
    CelExpr.CelCall newCall = expr.call().toBuilder().setArg(index, newArg).build();
    return expr.toBuilder().setCall(newCall).build();
  }

  private static CelExpr replaceCallSubtree(CelExpr expr, String functionName) {
    CelExpr.CelCall newCall =
        expr.call().toBuilder().setFunction(functionName).clearTarget().build();
    return expr.toBuilder().setCall(newCall).build();
  }

  private static CelExpr replaceCallSubtree(CelExpr expr, CelExpr target) {
    CelExpr.CelCall newCall = expr.call().toBuilder().setTarget(target).build();
    return expr.toBuilder().setCall(newCall).build();
  }

  private static CelExpr replaceListElementSubtree(CelExpr expr, CelExpr element, int index) {
    CelExpr.CelList newList = expr.list().toBuilder().setElement(index, element).build();
    return expr.toBuilder().setList(newList).build();
  }

  private static CelExpr replaceStructEntryValueSubtree(CelExpr expr, CelExpr newValue, int index) {
    CelExpr.CelStruct struct = expr.struct();
    CelExpr.CelStruct.Entry newEntry =
        struct.entries().get(index).toBuilder().setValue(newValue).build();
    struct = struct.toBuilder().setEntry(index, newEntry).build();
    return expr.toBuilder().setStruct(struct).build();
  }

  private static CelExpr replaceMapEntryKeySubtree(CelExpr expr, CelExpr newKey, int index) {
    CelExpr.CelMap map = expr.map();
    CelExpr.CelMap.Entry newEntry = map.entries().get(index).toBuilder().setKey(newKey).build();
    map = map.toBuilder().setEntry(index, newEntry).build();
    return expr.toBuilder().setMap(map).build();
  }

  private static CelExpr replaceMapEntryValueSubtree(CelExpr expr, CelExpr newValue, int index) {
    CelExpr.CelMap map = expr.map();
    CelExpr.CelMap.Entry newEntry = map.entries().get(index).toBuilder().setValue(newValue).build();
    map = map.toBuilder().setEntry(index, newEntry).build();
    return expr.toBuilder().setMap(map).build();
  }

  private static CelExpr replaceComprehensionAccuInitSubtree(CelExpr expr, CelExpr newAccuInit) {
    CelExpr.CelComprehension newComprehension =
        expr.comprehension().toBuilder().setAccuInit(newAccuInit).build();
    return expr.toBuilder().setComprehension(newComprehension).build();
  }

  private static CelExpr replaceComprehensionRangeSubtree(CelExpr expr, CelExpr newRange) {
    CelExpr.CelComprehension newComprehension =
        expr.comprehension().toBuilder().setIterRange(newRange).build();
    return expr.toBuilder().setComprehension(newComprehension).build();
  }

  private static CelExpr replaceComprehensionStepSubtree(CelExpr expr, CelExpr newStep) {
    CelExpr.CelComprehension newComprehension =
        expr.comprehension().toBuilder().setLoopStep(newStep).build();
    return expr.toBuilder().setComprehension(newComprehension).build();
  }

  private static CelExpr replaceComprehensionResultSubtree(CelExpr expr, CelExpr newResult) {
    CelExpr.CelComprehension newComprehension =
        expr.comprehension().toBuilder().setResult(newResult).build();
    return expr.toBuilder().setComprehension(newComprehension).build();
  }
}
googleapis/google-cloud-java
36,644
java-cloudsupport/proto-google-cloud-cloudsupport-v2beta/src/main/java/com/google/cloud/support/v2beta/ListCommentsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/support/v2beta/comment_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.support.v2beta; /** * * * <pre> * The response message for the ListComments endpoint. * </pre> * * Protobuf type {@code google.cloud.support.v2beta.ListCommentsResponse} */ public final class ListCommentsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.support.v2beta.ListCommentsResponse) ListCommentsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListCommentsResponse.newBuilder() to construct. 
private ListCommentsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListCommentsResponse() { comments_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListCommentsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.support.v2beta.CommentServiceProto .internal_static_google_cloud_support_v2beta_ListCommentsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.support.v2beta.CommentServiceProto .internal_static_google_cloud_support_v2beta_ListCommentsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.support.v2beta.ListCommentsResponse.class, com.google.cloud.support.v2beta.ListCommentsResponse.Builder.class); } public static final int COMMENTS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.support.v2beta.Comment> comments_; /** * * * <pre> * List of the comments associated with the case. * </pre> * * <code>repeated .google.cloud.support.v2beta.Comment comments = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.support.v2beta.Comment> getCommentsList() { return comments_; } /** * * * <pre> * List of the comments associated with the case. * </pre> * * <code>repeated .google.cloud.support.v2beta.Comment comments = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.support.v2beta.CommentOrBuilder> getCommentsOrBuilderList() { return comments_; } /** * * * <pre> * List of the comments associated with the case. 
* </pre> * * <code>repeated .google.cloud.support.v2beta.Comment comments = 1;</code> */ @java.lang.Override public int getCommentsCount() { return comments_.size(); } /** * * * <pre> * List of the comments associated with the case. * </pre> * * <code>repeated .google.cloud.support.v2beta.Comment comments = 1;</code> */ @java.lang.Override public com.google.cloud.support.v2beta.Comment getComments(int index) { return comments_.get(index); } /** * * * <pre> * List of the comments associated with the case. * </pre> * * <code>repeated .google.cloud.support.v2beta.Comment comments = 1;</code> */ @java.lang.Override public com.google.cloud.support.v2beta.CommentOrBuilder getCommentsOrBuilder(int index) { return comments_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to retrieve the next page of results. Set this in the `page_token` * field of subsequent `cases.comments.list` requests. If unspecified, there * are no more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token to retrieve the next page of results. Set this in the `page_token` * field of subsequent `cases.comments.list` requests. If unspecified, there * are no more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/
// NOTE(review): everything below is protoc-generated boilerplate for the
// ListCommentsResponse message (see the @@protoc_insertion_point markers).
// Do not hand-edit logic here; regenerate from the .proto instead.
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    // Lazily convert the cached String form to ByteString and memoize it,
    // so repeated byte-level access does not re-encode UTF-8 each time.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    nextPageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

// Memoized isInitialized() result: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // This message has no required fields, so it is always initialized.
  memoizedIsInitialized = 1;
  return true;
}

// Serializes comments (field 1) then next_page_token (field 2, skipped when
// empty), followed by any unknown fields preserved from parsing.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  for (int i = 0; i < comments_.size(); i++) {
    output.writeMessage(1, comments_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
  }
  getUnknownFields().writeTo(output);
}

// Computes (and memoizes in memoizedSize) the wire size; must mirror writeTo().
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  for (int i = 0; i < comments_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, comments_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}

// Field-wise equality over comments, next_page_token, and unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.support.v2beta.ListCommentsResponse)) {
    return super.equals(obj);
  }
  com.google.cloud.support.v2beta.ListCommentsResponse other =
      (com.google.cloud.support.v2beta.ListCommentsResponse) obj;

  if (!getCommentsList().equals(other.getCommentsList())) return false;
  if (!getNextPageToken().equals(other.getNextPageToken())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

// Memoized hash consistent with equals(); 0 is reserved as "not computed".
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (getCommentsCount() > 0) {
    hash = (37 * hash) + COMMENTS_FIELD_NUMBER;
    hash = (53 * hash) + getCommentsList().hashCode();
  }
  hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getNextPageToken().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}

// --- Standard parseFrom overloads; all delegate to PARSER (defined below). ---

public static com.google.cloud.support.v2beta.ListCommentsResponse parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.support.v2beta.ListCommentsResponse parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.support.v2beta.ListCommentsResponse parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.support.v2beta.ListCommentsResponse parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.support.v2beta.ListCommentsResponse parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.support.v2beta.ListCommentsResponse parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.support.v2beta.ListCommentsResponse parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.support.v2beta.ListCommentsResponse parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.support.v2beta.ListCommentsResponse parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.support.v2beta.ListCommentsResponse parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.support.v2beta.ListCommentsResponse parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.support.v2beta.ListCommentsResponse parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// --- Builder factory methods. ---

@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(com.google.cloud.support.v2beta.ListCommentsResponse prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // Avoid an unnecessary merge when converting the (empty) default instance.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}

/**
 *
 *
 * <pre>
 * The response message for the ListComments endpoint.
 * </pre>
 *
 * Protobuf type {@code google.cloud.support.v2beta.ListCommentsResponse}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.support.v2beta.ListCommentsResponse)
    com.google.cloud.support.v2beta.ListCommentsResponseOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.support.v2beta.CommentServiceProto
        .internal_static_google_cloud_support_v2beta_ListCommentsResponse_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.support.v2beta.CommentServiceProto
        .internal_static_google_cloud_support_v2beta_ListCommentsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.support.v2beta.ListCommentsResponse.class,
            com.google.cloud.support.v2beta.ListCommentsResponse.Builder.class);
  }

  // Construct using com.google.cloud.support.v2beta.ListCommentsResponse.newBuilder()
  private Builder() {}

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
  }

  // Resets all fields to their defaults; bit 0x1 tracks comments mutability,
  // bit 0x2 tracks next_page_token presence.
  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    if (commentsBuilder_ == null) {
      comments_ = java.util.Collections.emptyList();
    } else {
      comments_ = null;
      commentsBuilder_.clear();
    }
    bitField0_ = (bitField0_ & ~0x00000001);
    nextPageToken_ = "";
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.support.v2beta.CommentServiceProto
        .internal_static_google_cloud_support_v2beta_ListCommentsResponse_descriptor;
  }

  @java.lang.Override
  public com.google.cloud.support.v2beta.ListCommentsResponse getDefaultInstanceForType() {
    return com.google.cloud.support.v2beta.ListCommentsResponse.getDefaultInstance();
  }

  @java.lang.Override
  public com.google.cloud.support.v2beta.ListCommentsResponse build() {
    com.google.cloud.support.v2beta.ListCommentsResponse result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public com.google.cloud.support.v2beta.ListCommentsResponse buildPartial() {
    com.google.cloud.support.v2beta.ListCommentsResponse result =
        new com.google.cloud.support.v2beta.ListCommentsResponse(this);
    buildPartialRepeatedFields(result);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }

  // Hands the comments list off to the built message, freezing it as
  // unmodifiable if this builder still owns a mutable copy.
  private void buildPartialRepeatedFields(
      com.google.cloud.support.v2beta.ListCommentsResponse result) {
    if (commentsBuilder_ == null) {
      if (((bitField0_ & 0x00000001) != 0)) {
        comments_ = java.util.Collections.unmodifiableList(comments_);
        bitField0_ = (bitField0_ & ~0x00000001);
      }
      result.comments_ = comments_;
    } else {
      result.comments_ = commentsBuilder_.build();
    }
  }

  // Copies the singular fields that were explicitly set (per bitField0_).
  private void buildPartial0(com.google.cloud.support.v2beta.ListCommentsResponse result) {
    int from_bitField0_ = bitField0_;
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.nextPageToken_ = nextPageToken_;
    }
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.cloud.support.v2beta.ListCommentsResponse) {
      return mergeFrom((com.google.cloud.support.v2beta.ListCommentsResponse) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  // Merges another ListCommentsResponse into this builder: appends its
  // comments (or adopts its list wholesale when ours is empty) and takes its
  // non-empty next_page_token.
  public Builder mergeFrom(com.google.cloud.support.v2beta.ListCommentsResponse other) {
    if (other == com.google.cloud.support.v2beta.ListCommentsResponse.getDefaultInstance())
      return this;
    if (commentsBuilder_ == null) {
      if (!other.comments_.isEmpty()) {
        if (comments_.isEmpty()) {
          comments_ = other.comments_;
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          ensureCommentsIsMutable();
          comments_.addAll(other.comments_);
        }
        onChanged();
      }
    } else {
      if (!other.comments_.isEmpty()) {
        if (commentsBuilder_.isEmpty()) {
          // Drop the empty nested builder and share the other message's list
          // directly; re-create the field builder only when forced to.
          commentsBuilder_.dispose();
          commentsBuilder_ = null;
          comments_ = other.comments_;
          bitField0_ = (bitField0_ & ~0x00000001);
          commentsBuilder_ =
              com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                  ? getCommentsFieldBuilder()
                  : null;
        } else {
          commentsBuilder_.addAllMessages(other.comments_);
        }
      }
    }
    if (!other.getNextPageToken().isEmpty()) {
      nextPageToken_ = other.nextPageToken_;
      bitField0_ |= 0x00000002;
      onChanged();
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  // Streaming parse: dispatches on wire tags (10 = comments, 18 =
  // next_page_token); unrecognized fields are preserved via parseUnknownField.
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              com.google.cloud.support.v2beta.Comment m =
                  input.readMessage(
                      com.google.cloud.support.v2beta.Comment.parser(), extensionRegistry);
              if (commentsBuilder_ == null) {
                ensureCommentsIsMutable();
                comments_.add(m);
              } else {
                commentsBuilder_.addMessage(m);
              }
              break;
            } // case 10
          case 18:
            {
              nextPageToken_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000002;
              break;
            } // case 18
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }

  private int bitField0_;

  // comments is kept either here (plain list) or in commentsBuilder_ — never
  // both; see getCommentsFieldBuilder() for the hand-off.
  private java.util.List<com.google.cloud.support.v2beta.Comment> comments_ =
      java.util.Collections.emptyList();

  // Copy-on-write: replaces a shared/immutable list with a private ArrayList
  // before the first mutation (tracked via bit 0x1).
  private void ensureCommentsIsMutable() {
    if (!((bitField0_ & 0x00000001) != 0)) {
      comments_ = new java.util.ArrayList<com.google.cloud.support.v2beta.Comment>(comments_);
      bitField0_ |= 0x00000001;
    }
  }

  private com.google.protobuf.RepeatedFieldBuilderV3<
          com.google.cloud.support.v2beta.Comment,
          com.google.cloud.support.v2beta.Comment.Builder,
          com.google.cloud.support.v2beta.CommentOrBuilder>
      commentsBuilder_;

  /**
   *
   *
   * <pre>
   * List of the comments associated with the case.
   * </pre>
   *
   * <code>repeated .google.cloud.support.v2beta.Comment comments = 1;</code>
   */
  public java.util.List<com.google.cloud.support.v2beta.Comment> getCommentsList() {
    if (commentsBuilder_ == null) {
      return java.util.Collections.unmodifiableList(comments_);
    } else {
      return commentsBuilder_.getMessageList();
    }
  }

  /**
   *
   *
   * <pre>
   * List of the comments associated with the case.
   * </pre>
   *
   * <code>repeated .google.cloud.support.v2beta.Comment comments = 1;</code>
   */
  public int getCommentsCount() {
    if (commentsBuilder_ == null) {
      return comments_.size();
    } else {
      return commentsBuilder_.getCount();
    }
  }

  /**
   *
   *
   * <pre>
   * List of the comments associated with the case.
   * </pre>
   *
   * <code>repeated .google.cloud.support.v2beta.Comment comments = 1;</code>
   */
  public com.google.cloud.support.v2beta.Comment getComments(int index) {
    if (commentsBuilder_ == null) {
      return comments_.get(index);
    } else {
      return commentsBuilder_.getMessage(index);
    }
  }

  /**
   *
   *
   * <pre>
   * List of the comments associated with the case.
   * </pre>
   *
   * <code>repeated .google.cloud.support.v2beta.Comment comments = 1;</code>
   */
  public Builder setComments(int index, com.google.cloud.support.v2beta.Comment value) {
    if (commentsBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureCommentsIsMutable();
      comments_.set(index, value);
      onChanged();
    } else {
      commentsBuilder_.setMessage(index, value);
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * List of the comments associated with the case.
   * </pre>
   *
   * <code>repeated .google.cloud.support.v2beta.Comment comments = 1;</code>
   */
  public Builder setComments(
      int index, com.google.cloud.support.v2beta.Comment.Builder builderForValue) {
    if (commentsBuilder_ == null) {
      ensureCommentsIsMutable();
      comments_.set(index, builderForValue.build());
      onChanged();
    } else {
      commentsBuilder_.setMessage(index, builderForValue.build());
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * List of the comments associated with the case.
   * </pre>
   *
   * <code>repeated .google.cloud.support.v2beta.Comment comments = 1;</code>
   */
  public Builder addComments(com.google.cloud.support.v2beta.Comment value) {
    if (commentsBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureCommentsIsMutable();
      comments_.add(value);
      onChanged();
    } else {
      commentsBuilder_.addMessage(value);
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * List of the comments associated with the case.
   * </pre>
   *
   * <code>repeated .google.cloud.support.v2beta.Comment comments = 1;</code>
   */
  public Builder addComments(int index, com.google.cloud.support.v2beta.Comment value) {
    if (commentsBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureCommentsIsMutable();
      comments_.add(index, value);
      onChanged();
    } else {
      commentsBuilder_.addMessage(index, value);
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * List of the comments associated with the case.
   * </pre>
   *
   * <code>repeated .google.cloud.support.v2beta.Comment comments = 1;</code>
   */
  public Builder addComments(com.google.cloud.support.v2beta.Comment.Builder builderForValue) {
    if (commentsBuilder_ == null) {
      ensureCommentsIsMutable();
      comments_.add(builderForValue.build());
      onChanged();
    } else {
      commentsBuilder_.addMessage(builderForValue.build());
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * List of the comments associated with the case.
   * </pre>
   *
   * <code>repeated .google.cloud.support.v2beta.Comment comments = 1;</code>
   */
  public Builder addComments(
      int index, com.google.cloud.support.v2beta.Comment.Builder builderForValue) {
    if (commentsBuilder_ == null) {
      ensureCommentsIsMutable();
      comments_.add(index, builderForValue.build());
      onChanged();
    } else {
      commentsBuilder_.addMessage(index, builderForValue.build());
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * List of the comments associated with the case.
   * </pre>
   *
   * <code>repeated .google.cloud.support.v2beta.Comment comments = 1;</code>
   */
  public Builder addAllComments(
      java.lang.Iterable<? extends com.google.cloud.support.v2beta.Comment> values) {
    if (commentsBuilder_ == null) {
      ensureCommentsIsMutable();
      com.google.protobuf.AbstractMessageLite.Builder.addAll(values, comments_);
      onChanged();
    } else {
      commentsBuilder_.addAllMessages(values);
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * List of the comments associated with the case.
   * </pre>
   *
   * <code>repeated .google.cloud.support.v2beta.Comment comments = 1;</code>
   */
  public Builder clearComments() {
    if (commentsBuilder_ == null) {
      comments_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
    } else {
      commentsBuilder_.clear();
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * List of the comments associated with the case.
   * </pre>
   *
   * <code>repeated .google.cloud.support.v2beta.Comment comments = 1;</code>
   */
  public Builder removeComments(int index) {
    if (commentsBuilder_ == null) {
      ensureCommentsIsMutable();
      comments_.remove(index);
      onChanged();
    } else {
      commentsBuilder_.remove(index);
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * List of the comments associated with the case.
   * </pre>
   *
   * <code>repeated .google.cloud.support.v2beta.Comment comments = 1;</code>
   */
  public com.google.cloud.support.v2beta.Comment.Builder getCommentsBuilder(int index) {
    return getCommentsFieldBuilder().getBuilder(index);
  }

  /**
   *
   *
   * <pre>
   * List of the comments associated with the case.
   * </pre>
   *
   * <code>repeated .google.cloud.support.v2beta.Comment comments = 1;</code>
   */
  public com.google.cloud.support.v2beta.CommentOrBuilder getCommentsOrBuilder(int index) {
    if (commentsBuilder_ == null) {
      return comments_.get(index);
    } else {
      return commentsBuilder_.getMessageOrBuilder(index);
    }
  }

  /**
   *
   *
   * <pre>
   * List of the comments associated with the case.
   * </pre>
   *
   * <code>repeated .google.cloud.support.v2beta.Comment comments = 1;</code>
   */
  public java.util.List<? extends com.google.cloud.support.v2beta.CommentOrBuilder>
      getCommentsOrBuilderList() {
    if (commentsBuilder_ != null) {
      return commentsBuilder_.getMessageOrBuilderList();
    } else {
      return java.util.Collections.unmodifiableList(comments_);
    }
  }

  /**
   *
   *
   * <pre>
   * List of the comments associated with the case.
   * </pre>
   *
   * <code>repeated .google.cloud.support.v2beta.Comment comments = 1;</code>
   */
  public com.google.cloud.support.v2beta.Comment.Builder addCommentsBuilder() {
    return getCommentsFieldBuilder()
        .addBuilder(com.google.cloud.support.v2beta.Comment.getDefaultInstance());
  }

  /**
   *
   *
   * <pre>
   * List of the comments associated with the case.
   * </pre>
   *
   * <code>repeated .google.cloud.support.v2beta.Comment comments = 1;</code>
   */
  public com.google.cloud.support.v2beta.Comment.Builder addCommentsBuilder(int index) {
    return getCommentsFieldBuilder()
        .addBuilder(index, com.google.cloud.support.v2beta.Comment.getDefaultInstance());
  }

  /**
   *
   *
   * <pre>
   * List of the comments associated with the case.
   * </pre>
   *
   * <code>repeated .google.cloud.support.v2beta.Comment comments = 1;</code>
   */
  public java.util.List<com.google.cloud.support.v2beta.Comment.Builder>
      getCommentsBuilderList() {
    return getCommentsFieldBuilder().getBuilderList();
  }

  // Lazily switches the comments field from the plain-list representation to
  // the RepeatedFieldBuilderV3 representation (used once nested builders are
  // requested); comments_ is nulled out to enforce single ownership.
  private com.google.protobuf.RepeatedFieldBuilderV3<
          com.google.cloud.support.v2beta.Comment,
          com.google.cloud.support.v2beta.Comment.Builder,
          com.google.cloud.support.v2beta.CommentOrBuilder>
      getCommentsFieldBuilder() {
    if (commentsBuilder_ == null) {
      commentsBuilder_ =
          new com.google.protobuf.RepeatedFieldBuilderV3<
              com.google.cloud.support.v2beta.Comment,
              com.google.cloud.support.v2beta.Comment.Builder,
              com.google.cloud.support.v2beta.CommentOrBuilder>(
              comments_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
      comments_ = null;
    }
    return commentsBuilder_;
  }

  // Stored as String or ByteString, converted lazily in either direction.
  private java.lang.Object nextPageToken_ = "";

  /**
   *
   *
   * <pre>
   * A token to retrieve the next page of results. Set this in the `page_token`
   * field of subsequent `cases.comments.list` requests. If unspecified, there
   * are no more results to retrieve.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }

  /**
   *
   *
   * <pre>
   * A token to retrieve the next page of results. Set this in the `page_token`
   * field of subsequent `cases.comments.list` requests. If unspecified, there
   * are no more results to retrieve.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  /**
   *
   *
   * <pre>
   * A token to retrieve the next page of results. Set this in the `page_token`
   * field of subsequent `cases.comments.list` requests. If unspecified, there
   * are no more results to retrieve.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @param value The nextPageToken to set.
   * @return This builder for chaining.
   */
  public Builder setNextPageToken(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    nextPageToken_ = value;
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * A token to retrieve the next page of results. Set this in the `page_token`
   * field of subsequent `cases.comments.list` requests. If unspecified, there
   * are no more results to retrieve.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearNextPageToken() {
    nextPageToken_ = getDefaultInstance().getNextPageToken();
    bitField0_ = (bitField0_ & ~0x00000002);
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * A token to retrieve the next page of results. Set this in the `page_token`
   * field of subsequent `cases.comments.list` requests. If unspecified, there
   * are no more results to retrieve.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @param value The bytes for nextPageToken to set.
   * @return This builder for chaining.
   */
  public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    nextPageToken_ = value;
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.cloud.support.v2beta.ListCommentsResponse)
}

// @@protoc_insertion_point(class_scope:google.cloud.support.v2beta.ListCommentsResponse)
private static final com.google.cloud.support.v2beta.ListCommentsResponse DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.support.v2beta.ListCommentsResponse();
}

public static com.google.cloud.support.v2beta.ListCommentsResponse getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Shared parser; wraps any parse failure in InvalidProtocolBufferException
// carrying the partially-built message.
private static final com.google.protobuf.Parser<ListCommentsResponse> PARSER =
    new com.google.protobuf.AbstractParser<ListCommentsResponse>() {
      @java.lang.Override
      public ListCommentsResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<ListCommentsResponse> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<ListCommentsResponse> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.support.v2beta.ListCommentsResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
googleapis/google-cloud-java
36,665
java-shopping-merchant-conversions/proto-google-shopping-merchant-conversions-v1beta/src/main/java/com/google/shopping/merchant/conversions/v1beta/GoogleAnalyticsLink.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/shopping/merchant/conversions/v1beta/conversionsources.proto // Protobuf Java Version: 3.25.8 package com.google.shopping.merchant.conversions.v1beta; /** * * * <pre> * "Google Analytics Link" sources can be used to get conversion data from an * existing Google Analytics property into the linked Merchant Center account. * </pre> * * Protobuf type {@code google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink} */ public final class GoogleAnalyticsLink extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink) GoogleAnalyticsLinkOrBuilder { private static final long serialVersionUID = 0L; // Use GoogleAnalyticsLink.newBuilder() to construct. 
private GoogleAnalyticsLink(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private GoogleAnalyticsLink() { property_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new GoogleAnalyticsLink(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.shopping.merchant.conversions.v1beta.ConversionSourcesProto .internal_static_google_shopping_merchant_conversions_v1beta_GoogleAnalyticsLink_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.shopping.merchant.conversions.v1beta.ConversionSourcesProto .internal_static_google_shopping_merchant_conversions_v1beta_GoogleAnalyticsLink_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink.class, com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink.Builder.class); } private int bitField0_; public static final int PROPERTY_ID_FIELD_NUMBER = 1; private long propertyId_ = 0L; /** * * * <pre> * Required. Immutable. ID of the Google Analytics property the merchant is * linked to. * </pre> * * <code> * int64 property_id = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE]; * </code> * * @return The propertyId. */ @java.lang.Override public long getPropertyId() { return propertyId_; } public static final int ATTRIBUTION_SETTINGS_FIELD_NUMBER = 2; private com.google.shopping.merchant.conversions.v1beta.AttributionSettings attributionSettings_; /** * * * <pre> * Output only. Attribution settings for the linked Google Analytics property. * </pre> * * <code> * .google.shopping.merchant.conversions.v1beta.AttributionSettings attribution_settings = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the attributionSettings field is set. 
*/ @java.lang.Override public boolean hasAttributionSettings() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Output only. Attribution settings for the linked Google Analytics property. * </pre> * * <code> * .google.shopping.merchant.conversions.v1beta.AttributionSettings attribution_settings = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The attributionSettings. */ @java.lang.Override public com.google.shopping.merchant.conversions.v1beta.AttributionSettings getAttributionSettings() { return attributionSettings_ == null ? com.google.shopping.merchant.conversions.v1beta.AttributionSettings.getDefaultInstance() : attributionSettings_; } /** * * * <pre> * Output only. Attribution settings for the linked Google Analytics property. * </pre> * * <code> * .google.shopping.merchant.conversions.v1beta.AttributionSettings attribution_settings = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ @java.lang.Override public com.google.shopping.merchant.conversions.v1beta.AttributionSettingsOrBuilder getAttributionSettingsOrBuilder() { return attributionSettings_ == null ? com.google.shopping.merchant.conversions.v1beta.AttributionSettings.getDefaultInstance() : attributionSettings_; } public static final int PROPERTY_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object property_ = ""; /** * * * <pre> * Output only. Name of the Google Analytics property the merchant is linked * to. * </pre> * * <code>string property = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The property. */ @java.lang.Override public java.lang.String getProperty() { java.lang.Object ref = property_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); property_ = s; return s; } } /** * * * <pre> * Output only. 
Name of the Google Analytics property the merchant is linked * to. * </pre> * * <code>string property = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for property. */ @java.lang.Override public com.google.protobuf.ByteString getPropertyBytes() { java.lang.Object ref = property_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); property_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (propertyId_ != 0L) { output.writeInt64(1, propertyId_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getAttributionSettings()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(property_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, property_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (propertyId_ != 0L) { size += com.google.protobuf.CodedOutputStream.computeInt64Size(1, propertyId_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getAttributionSettings()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(property_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, property_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof 
com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink)) { return super.equals(obj); } com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink other = (com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink) obj; if (getPropertyId() != other.getPropertyId()) return false; if (hasAttributionSettings() != other.hasAttributionSettings()) return false; if (hasAttributionSettings()) { if (!getAttributionSettings().equals(other.getAttributionSettings())) return false; } if (!getProperty().equals(other.getProperty())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PROPERTY_ID_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getPropertyId()); if (hasAttributionSettings()) { hash = (37 * hash) + ATTRIBUTION_SETTINGS_FIELD_NUMBER; hash = (53 * hash) + getAttributionSettings().hashCode(); } hash = (37 * hash) + PROPERTY_FIELD_NUMBER; hash = (53 * hash) + getProperty().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); 
} public static com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink 
parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * "Google Analytics Link" sources can be used to get conversion data from an * existing Google Analytics property into the linked Merchant Center account. 
* </pre> * * Protobuf type {@code google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink) com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLinkOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.shopping.merchant.conversions.v1beta.ConversionSourcesProto .internal_static_google_shopping_merchant_conversions_v1beta_GoogleAnalyticsLink_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.shopping.merchant.conversions.v1beta.ConversionSourcesProto .internal_static_google_shopping_merchant_conversions_v1beta_GoogleAnalyticsLink_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink.class, com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink.Builder.class); } // Construct using // com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getAttributionSettingsFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; propertyId_ = 0L; attributionSettings_ = null; if (attributionSettingsBuilder_ != null) { attributionSettingsBuilder_.dispose(); attributionSettingsBuilder_ = null; } property_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
com.google.shopping.merchant.conversions.v1beta.ConversionSourcesProto .internal_static_google_shopping_merchant_conversions_v1beta_GoogleAnalyticsLink_descriptor; } @java.lang.Override public com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink getDefaultInstanceForType() { return com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink .getDefaultInstance(); } @java.lang.Override public com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink build() { com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink buildPartial() { com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink result = new com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.propertyId_ = propertyId_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.attributionSettings_ = attributionSettingsBuilder_ == null ? 
attributionSettings_ : attributionSettingsBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.property_ = property_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink) { return mergeFrom( (com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink other) { if (other == com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink .getDefaultInstance()) return this; if (other.getPropertyId() != 0L) { setPropertyId(other.getPropertyId()); } if (other.hasAttributionSettings()) { mergeAttributionSettings(other.getAttributionSettings()); } if (!other.getProperty().isEmpty()) { property_ = other.property_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } 
@java.lang.Override
public final boolean isInitialized() {
  // Proto3 message with no extensions: every builder state is valid.
  return true;
}

// Wire-format parser for the Builder: reads tag/value pairs from the
// CodedInputStream until end of input (tag 0) and merges each known field
// into this builder; unknown fields are preserved via parseUnknownField.
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          // Tag 0 marks end of input.
          done = true;
          break;
        case 8:
          {
            // Field 1 (int64 property_id), varint wire type.
            propertyId_ = input.readInt64();
            bitField0_ |= 0x00000001;
            break;
          } // case 8
        case 18:
          {
            // Field 2 (message attribution_settings), length-delimited;
            // merged into the nested builder.
            input.readMessage(
                getAttributionSettingsFieldBuilder().getBuilder(), extensionRegistry);
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        case 26:
          {
            // Field 3 (string property), length-delimited; must be valid UTF-8.
            property_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000004;
            break;
          } // case 26
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    // Surface parse errors as IOException per the mergeFrom contract.
    throw e.unwrapIOException();
  } finally {
    onChanged();
  } // finally
  return this;
}

private int bitField0_;

// Backing storage for field 1 (property_id).
private long propertyId_;

/**
 *
 *
 * <pre>
 * Required. Immutable. ID of the Google Analytics property the merchant is
 * linked to.
 * </pre>
 *
 * <code>
 * int64 property_id = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE];
 * </code>
 *
 * @return The propertyId.
 */
@java.lang.Override
public long getPropertyId() {
  return propertyId_;
}

/**
 *
 *
 * <pre>
 * Required. Immutable. ID of the Google Analytics property the merchant is
 * linked to.
 * </pre>
 *
 * <code>
 * int64 property_id = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE];
 * </code>
 *
 * @param value The propertyId to set.
 * @return This builder for chaining.
 */
public Builder setPropertyId(long value) {
  propertyId_ = value;
  // Mark field 1 as explicitly set so buildPartial copies it out.
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}

/**
 *
 *
 * <pre>
 * Required. Immutable.
ID of the Google Analytics property the merchant is * linked to. * </pre> * * <code> * int64 property_id = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE]; * </code> * * @return This builder for chaining. */ public Builder clearPropertyId() { bitField0_ = (bitField0_ & ~0x00000001); propertyId_ = 0L; onChanged(); return this; } private com.google.shopping.merchant.conversions.v1beta.AttributionSettings attributionSettings_; private com.google.protobuf.SingleFieldBuilderV3< com.google.shopping.merchant.conversions.v1beta.AttributionSettings, com.google.shopping.merchant.conversions.v1beta.AttributionSettings.Builder, com.google.shopping.merchant.conversions.v1beta.AttributionSettingsOrBuilder> attributionSettingsBuilder_; /** * * * <pre> * Output only. Attribution settings for the linked Google Analytics property. * </pre> * * <code> * .google.shopping.merchant.conversions.v1beta.AttributionSettings attribution_settings = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the attributionSettings field is set. */ public boolean hasAttributionSettings() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Output only. Attribution settings for the linked Google Analytics property. * </pre> * * <code> * .google.shopping.merchant.conversions.v1beta.AttributionSettings attribution_settings = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The attributionSettings. */ public com.google.shopping.merchant.conversions.v1beta.AttributionSettings getAttributionSettings() { if (attributionSettingsBuilder_ == null) { return attributionSettings_ == null ? com.google.shopping.merchant.conversions.v1beta.AttributionSettings .getDefaultInstance() : attributionSettings_; } else { return attributionSettingsBuilder_.getMessage(); } } /** * * * <pre> * Output only. Attribution settings for the linked Google Analytics property. 
* </pre> * * <code> * .google.shopping.merchant.conversions.v1beta.AttributionSettings attribution_settings = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder setAttributionSettings( com.google.shopping.merchant.conversions.v1beta.AttributionSettings value) { if (attributionSettingsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } attributionSettings_ = value; } else { attributionSettingsBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Output only. Attribution settings for the linked Google Analytics property. * </pre> * * <code> * .google.shopping.merchant.conversions.v1beta.AttributionSettings attribution_settings = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder setAttributionSettings( com.google.shopping.merchant.conversions.v1beta.AttributionSettings.Builder builderForValue) { if (attributionSettingsBuilder_ == null) { attributionSettings_ = builderForValue.build(); } else { attributionSettingsBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Output only. Attribution settings for the linked Google Analytics property. 
* </pre> * * <code> * .google.shopping.merchant.conversions.v1beta.AttributionSettings attribution_settings = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder mergeAttributionSettings( com.google.shopping.merchant.conversions.v1beta.AttributionSettings value) { if (attributionSettingsBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && attributionSettings_ != null && attributionSettings_ != com.google.shopping.merchant.conversions.v1beta.AttributionSettings .getDefaultInstance()) { getAttributionSettingsBuilder().mergeFrom(value); } else { attributionSettings_ = value; } } else { attributionSettingsBuilder_.mergeFrom(value); } if (attributionSettings_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Output only. Attribution settings for the linked Google Analytics property. * </pre> * * <code> * .google.shopping.merchant.conversions.v1beta.AttributionSettings attribution_settings = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder clearAttributionSettings() { bitField0_ = (bitField0_ & ~0x00000002); attributionSettings_ = null; if (attributionSettingsBuilder_ != null) { attributionSettingsBuilder_.dispose(); attributionSettingsBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Output only. Attribution settings for the linked Google Analytics property. * </pre> * * <code> * .google.shopping.merchant.conversions.v1beta.AttributionSettings attribution_settings = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public com.google.shopping.merchant.conversions.v1beta.AttributionSettings.Builder getAttributionSettingsBuilder() { bitField0_ |= 0x00000002; onChanged(); return getAttributionSettingsFieldBuilder().getBuilder(); } /** * * * <pre> * Output only. Attribution settings for the linked Google Analytics property. 
* </pre> * * <code> * .google.shopping.merchant.conversions.v1beta.AttributionSettings attribution_settings = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public com.google.shopping.merchant.conversions.v1beta.AttributionSettingsOrBuilder getAttributionSettingsOrBuilder() { if (attributionSettingsBuilder_ != null) { return attributionSettingsBuilder_.getMessageOrBuilder(); } else { return attributionSettings_ == null ? com.google.shopping.merchant.conversions.v1beta.AttributionSettings .getDefaultInstance() : attributionSettings_; } } /** * * * <pre> * Output only. Attribution settings for the linked Google Analytics property. * </pre> * * <code> * .google.shopping.merchant.conversions.v1beta.AttributionSettings attribution_settings = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.shopping.merchant.conversions.v1beta.AttributionSettings, com.google.shopping.merchant.conversions.v1beta.AttributionSettings.Builder, com.google.shopping.merchant.conversions.v1beta.AttributionSettingsOrBuilder> getAttributionSettingsFieldBuilder() { if (attributionSettingsBuilder_ == null) { attributionSettingsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.shopping.merchant.conversions.v1beta.AttributionSettings, com.google.shopping.merchant.conversions.v1beta.AttributionSettings.Builder, com.google.shopping.merchant.conversions.v1beta.AttributionSettingsOrBuilder>( getAttributionSettings(), getParentForChildren(), isClean()); attributionSettings_ = null; } return attributionSettingsBuilder_; } private java.lang.Object property_ = ""; /** * * * <pre> * Output only. Name of the Google Analytics property the merchant is linked * to. * </pre> * * <code>string property = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The property. 
*/ public java.lang.String getProperty() { java.lang.Object ref = property_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); property_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Output only. Name of the Google Analytics property the merchant is linked * to. * </pre> * * <code>string property = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for property. */ public com.google.protobuf.ByteString getPropertyBytes() { java.lang.Object ref = property_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); property_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Output only. Name of the Google Analytics property the merchant is linked * to. * </pre> * * <code>string property = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @param value The property to set. * @return This builder for chaining. */ public Builder setProperty(java.lang.String value) { if (value == null) { throw new NullPointerException(); } property_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Output only. Name of the Google Analytics property the merchant is linked * to. * </pre> * * <code>string property = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return This builder for chaining. */ public Builder clearProperty() { property_ = getDefaultInstance().getProperty(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Output only. Name of the Google Analytics property the merchant is linked * to. * </pre> * * <code>string property = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @param value The bytes for property to set. * @return This builder for chaining. 
*/ public Builder setPropertyBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); property_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink) } // @@protoc_insertion_point(class_scope:google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink) private static final com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink(); } public static com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<GoogleAnalyticsLink> PARSER = new com.google.protobuf.AbstractParser<GoogleAnalyticsLink>() { @java.lang.Override public GoogleAnalyticsLink parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); 
} return builder.buildPartial(); } }; public static com.google.protobuf.Parser<GoogleAnalyticsLink> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<GoogleAnalyticsLink> getParserForType() { return PARSER; } @java.lang.Override public com.google.shopping.merchant.conversions.v1beta.GoogleAnalyticsLink getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
// ==== end of GoogleAnalyticsLink.java ====
// Next concatenated file record:
//   repo: googleapis/google-cloud-java (size: 36,751 bytes)
//   path: java-vmmigration/proto-google-cloud-vmmigration-v1/src/main/java/com/google/cloud/vmmigration/v1/AvailableUpdates.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/vmmigration/v1/vmmigration.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.vmmigration.v1; /** * * * <pre> * Holds information about the available versions for upgrade. * </pre> * * Protobuf type {@code google.cloud.vmmigration.v1.AvailableUpdates} */ public final class AvailableUpdates extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.vmmigration.v1.AvailableUpdates) AvailableUpdatesOrBuilder { private static final long serialVersionUID = 0L; // Use AvailableUpdates.newBuilder() to construct. 
// Builder-based constructor used by Builder.buildPartial().
private AvailableUpdates(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// No-arg constructor for the default instance; all fields keep defaults.
private AvailableUpdates() {}

@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new AvailableUpdates();
}

// Descriptor for this message type, looked up from the generated
// VmMigrationProto holder class.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.vmmigration.v1.VmMigrationProto
      .internal_static_google_cloud_vmmigration_v1_AvailableUpdates_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.vmmigration.v1.VmMigrationProto
      .internal_static_google_cloud_vmmigration_v1_AvailableUpdates_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.vmmigration.v1.AvailableUpdates.class,
          com.google.cloud.vmmigration.v1.AvailableUpdates.Builder.class);
}

// Presence bits: bit 0 = new_deployable_appliance, bit 1 = in_place_update.
private int bitField0_;

public static final int NEW_DEPLOYABLE_APPLIANCE_FIELD_NUMBER = 1;

// Backing storage for field 1; null until set (see hasNewDeployableAppliance).
private com.google.cloud.vmmigration.v1.ApplianceVersion newDeployableAppliance_;

/**
 *
 *
 * <pre>
 * The newest deployable version of the appliance.
 * The current appliance can't be updated into this version, and the owner
 * must manually deploy this OVA to a new appliance.
 * </pre>
 *
 * <code>.google.cloud.vmmigration.v1.ApplianceVersion new_deployable_appliance = 1;</code>
 *
 * @return Whether the newDeployableAppliance field is set.
 */
@java.lang.Override
public boolean hasNewDeployableAppliance() {
  return ((bitField0_ & 0x00000001) != 0);
}

/**
 *
 *
 * <pre>
 * The newest deployable version of the appliance.
 * The current appliance can't be updated into this version, and the owner
 * must manually deploy this OVA to a new appliance.
 * </pre>
 *
 * <code>.google.cloud.vmmigration.v1.ApplianceVersion new_deployable_appliance = 1;</code>
 *
 * @return The newDeployableAppliance.
*/ @java.lang.Override public com.google.cloud.vmmigration.v1.ApplianceVersion getNewDeployableAppliance() { return newDeployableAppliance_ == null ? com.google.cloud.vmmigration.v1.ApplianceVersion.getDefaultInstance() : newDeployableAppliance_; } /** * * * <pre> * The newest deployable version of the appliance. * The current appliance can't be updated into this version, and the owner * must manually deploy this OVA to a new appliance. * </pre> * * <code>.google.cloud.vmmigration.v1.ApplianceVersion new_deployable_appliance = 1;</code> */ @java.lang.Override public com.google.cloud.vmmigration.v1.ApplianceVersionOrBuilder getNewDeployableApplianceOrBuilder() { return newDeployableAppliance_ == null ? com.google.cloud.vmmigration.v1.ApplianceVersion.getDefaultInstance() : newDeployableAppliance_; } public static final int IN_PLACE_UPDATE_FIELD_NUMBER = 2; private com.google.cloud.vmmigration.v1.ApplianceVersion inPlaceUpdate_; /** * * * <pre> * The latest version for in place update. * The current appliance can be updated to this version using the API or m4c * CLI. * </pre> * * <code>.google.cloud.vmmigration.v1.ApplianceVersion in_place_update = 2;</code> * * @return Whether the inPlaceUpdate field is set. */ @java.lang.Override public boolean hasInPlaceUpdate() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The latest version for in place update. * The current appliance can be updated to this version using the API or m4c * CLI. * </pre> * * <code>.google.cloud.vmmigration.v1.ApplianceVersion in_place_update = 2;</code> * * @return The inPlaceUpdate. */ @java.lang.Override public com.google.cloud.vmmigration.v1.ApplianceVersion getInPlaceUpdate() { return inPlaceUpdate_ == null ? com.google.cloud.vmmigration.v1.ApplianceVersion.getDefaultInstance() : inPlaceUpdate_; } /** * * * <pre> * The latest version for in place update. * The current appliance can be updated to this version using the API or m4c * CLI. 
* </pre> * * <code>.google.cloud.vmmigration.v1.ApplianceVersion in_place_update = 2;</code> */ @java.lang.Override public com.google.cloud.vmmigration.v1.ApplianceVersionOrBuilder getInPlaceUpdateOrBuilder() { return inPlaceUpdate_ == null ? com.google.cloud.vmmigration.v1.ApplianceVersion.getDefaultInstance() : inPlaceUpdate_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getNewDeployableAppliance()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getInPlaceUpdate()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getNewDeployableAppliance()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getInPlaceUpdate()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.vmmigration.v1.AvailableUpdates)) { return super.equals(obj); } com.google.cloud.vmmigration.v1.AvailableUpdates other = (com.google.cloud.vmmigration.v1.AvailableUpdates) obj; if (hasNewDeployableAppliance() != other.hasNewDeployableAppliance()) return false; if (hasNewDeployableAppliance()) { if (!getNewDeployableAppliance().equals(other.getNewDeployableAppliance())) return false; } if (hasInPlaceUpdate() != other.hasInPlaceUpdate()) return false; 
if (hasInPlaceUpdate()) { if (!getInPlaceUpdate().equals(other.getInPlaceUpdate())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasNewDeployableAppliance()) { hash = (37 * hash) + NEW_DEPLOYABLE_APPLIANCE_FIELD_NUMBER; hash = (53 * hash) + getNewDeployableAppliance().hashCode(); } if (hasInPlaceUpdate()) { hash = (37 * hash) + IN_PLACE_UPDATE_FIELD_NUMBER; hash = (53 * hash) + getInPlaceUpdate().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.vmmigration.v1.AvailableUpdates parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.vmmigration.v1.AvailableUpdates parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vmmigration.v1.AvailableUpdates parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.vmmigration.v1.AvailableUpdates parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vmmigration.v1.AvailableUpdates parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.vmmigration.v1.AvailableUpdates parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vmmigration.v1.AvailableUpdates parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.vmmigration.v1.AvailableUpdates parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.vmmigration.v1.AvailableUpdates parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.vmmigration.v1.AvailableUpdates parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.vmmigration.v1.AvailableUpdates parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.vmmigration.v1.AvailableUpdates parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.vmmigration.v1.AvailableUpdates prototype) { return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Holds information about the available versions for upgrade. * </pre> * * Protobuf type {@code google.cloud.vmmigration.v1.AvailableUpdates} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.vmmigration.v1.AvailableUpdates) com.google.cloud.vmmigration.v1.AvailableUpdatesOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.vmmigration.v1.VmMigrationProto .internal_static_google_cloud_vmmigration_v1_AvailableUpdates_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.vmmigration.v1.VmMigrationProto .internal_static_google_cloud_vmmigration_v1_AvailableUpdates_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.vmmigration.v1.AvailableUpdates.class, com.google.cloud.vmmigration.v1.AvailableUpdates.Builder.class); } // Construct using com.google.cloud.vmmigration.v1.AvailableUpdates.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getNewDeployableApplianceFieldBuilder(); getInPlaceUpdateFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; newDeployableAppliance_ = null; if 
(newDeployableApplianceBuilder_ != null) { newDeployableApplianceBuilder_.dispose(); newDeployableApplianceBuilder_ = null; } inPlaceUpdate_ = null; if (inPlaceUpdateBuilder_ != null) { inPlaceUpdateBuilder_.dispose(); inPlaceUpdateBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.vmmigration.v1.VmMigrationProto .internal_static_google_cloud_vmmigration_v1_AvailableUpdates_descriptor; } @java.lang.Override public com.google.cloud.vmmigration.v1.AvailableUpdates getDefaultInstanceForType() { return com.google.cloud.vmmigration.v1.AvailableUpdates.getDefaultInstance(); } @java.lang.Override public com.google.cloud.vmmigration.v1.AvailableUpdates build() { com.google.cloud.vmmigration.v1.AvailableUpdates result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.vmmigration.v1.AvailableUpdates buildPartial() { com.google.cloud.vmmigration.v1.AvailableUpdates result = new com.google.cloud.vmmigration.v1.AvailableUpdates(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.vmmigration.v1.AvailableUpdates result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.newDeployableAppliance_ = newDeployableApplianceBuilder_ == null ? newDeployableAppliance_ : newDeployableApplianceBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.inPlaceUpdate_ = inPlaceUpdateBuilder_ == null ? 
inPlaceUpdate_ : inPlaceUpdateBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.vmmigration.v1.AvailableUpdates) { return mergeFrom((com.google.cloud.vmmigration.v1.AvailableUpdates) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.vmmigration.v1.AvailableUpdates other) { if (other == com.google.cloud.vmmigration.v1.AvailableUpdates.getDefaultInstance()) return this; if (other.hasNewDeployableAppliance()) { mergeNewDeployableAppliance(other.getNewDeployableAppliance()); } if (other.hasInPlaceUpdate()) { mergeInPlaceUpdate(other.getInPlaceUpdate()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if 
(extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getNewDeployableApplianceFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getInPlaceUpdateFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.vmmigration.v1.ApplianceVersion newDeployableAppliance_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.vmmigration.v1.ApplianceVersion, com.google.cloud.vmmigration.v1.ApplianceVersion.Builder, com.google.cloud.vmmigration.v1.ApplianceVersionOrBuilder> newDeployableApplianceBuilder_; /** * * * <pre> * The newest deployable version of the appliance. * The current appliance can't be updated into this version, and the owner * must manually deploy this OVA to a new appliance. * </pre> * * <code>.google.cloud.vmmigration.v1.ApplianceVersion new_deployable_appliance = 1;</code> * * @return Whether the newDeployableAppliance field is set. */ public boolean hasNewDeployableAppliance() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The newest deployable version of the appliance. * The current appliance can't be updated into this version, and the owner * must manually deploy this OVA to a new appliance. * </pre> * * <code>.google.cloud.vmmigration.v1.ApplianceVersion new_deployable_appliance = 1;</code> * * @return The newDeployableAppliance. 
*/ public com.google.cloud.vmmigration.v1.ApplianceVersion getNewDeployableAppliance() { if (newDeployableApplianceBuilder_ == null) { return newDeployableAppliance_ == null ? com.google.cloud.vmmigration.v1.ApplianceVersion.getDefaultInstance() : newDeployableAppliance_; } else { return newDeployableApplianceBuilder_.getMessage(); } } /** * * * <pre> * The newest deployable version of the appliance. * The current appliance can't be updated into this version, and the owner * must manually deploy this OVA to a new appliance. * </pre> * * <code>.google.cloud.vmmigration.v1.ApplianceVersion new_deployable_appliance = 1;</code> */ public Builder setNewDeployableAppliance( com.google.cloud.vmmigration.v1.ApplianceVersion value) { if (newDeployableApplianceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } newDeployableAppliance_ = value; } else { newDeployableApplianceBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The newest deployable version of the appliance. * The current appliance can't be updated into this version, and the owner * must manually deploy this OVA to a new appliance. * </pre> * * <code>.google.cloud.vmmigration.v1.ApplianceVersion new_deployable_appliance = 1;</code> */ public Builder setNewDeployableAppliance( com.google.cloud.vmmigration.v1.ApplianceVersion.Builder builderForValue) { if (newDeployableApplianceBuilder_ == null) { newDeployableAppliance_ = builderForValue.build(); } else { newDeployableApplianceBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The newest deployable version of the appliance. * The current appliance can't be updated into this version, and the owner * must manually deploy this OVA to a new appliance. 
* </pre> * * <code>.google.cloud.vmmigration.v1.ApplianceVersion new_deployable_appliance = 1;</code> */ public Builder mergeNewDeployableAppliance( com.google.cloud.vmmigration.v1.ApplianceVersion value) { if (newDeployableApplianceBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && newDeployableAppliance_ != null && newDeployableAppliance_ != com.google.cloud.vmmigration.v1.ApplianceVersion.getDefaultInstance()) { getNewDeployableApplianceBuilder().mergeFrom(value); } else { newDeployableAppliance_ = value; } } else { newDeployableApplianceBuilder_.mergeFrom(value); } if (newDeployableAppliance_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * The newest deployable version of the appliance. * The current appliance can't be updated into this version, and the owner * must manually deploy this OVA to a new appliance. * </pre> * * <code>.google.cloud.vmmigration.v1.ApplianceVersion new_deployable_appliance = 1;</code> */ public Builder clearNewDeployableAppliance() { bitField0_ = (bitField0_ & ~0x00000001); newDeployableAppliance_ = null; if (newDeployableApplianceBuilder_ != null) { newDeployableApplianceBuilder_.dispose(); newDeployableApplianceBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * The newest deployable version of the appliance. * The current appliance can't be updated into this version, and the owner * must manually deploy this OVA to a new appliance. * </pre> * * <code>.google.cloud.vmmigration.v1.ApplianceVersion new_deployable_appliance = 1;</code> */ public com.google.cloud.vmmigration.v1.ApplianceVersion.Builder getNewDeployableApplianceBuilder() { bitField0_ |= 0x00000001; onChanged(); return getNewDeployableApplianceFieldBuilder().getBuilder(); } /** * * * <pre> * The newest deployable version of the appliance. * The current appliance can't be updated into this version, and the owner * must manually deploy this OVA to a new appliance. 
* </pre> * * <code>.google.cloud.vmmigration.v1.ApplianceVersion new_deployable_appliance = 1;</code> */ public com.google.cloud.vmmigration.v1.ApplianceVersionOrBuilder getNewDeployableApplianceOrBuilder() { if (newDeployableApplianceBuilder_ != null) { return newDeployableApplianceBuilder_.getMessageOrBuilder(); } else { return newDeployableAppliance_ == null ? com.google.cloud.vmmigration.v1.ApplianceVersion.getDefaultInstance() : newDeployableAppliance_; } } /** * * * <pre> * The newest deployable version of the appliance. * The current appliance can't be updated into this version, and the owner * must manually deploy this OVA to a new appliance. * </pre> * * <code>.google.cloud.vmmigration.v1.ApplianceVersion new_deployable_appliance = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.vmmigration.v1.ApplianceVersion, com.google.cloud.vmmigration.v1.ApplianceVersion.Builder, com.google.cloud.vmmigration.v1.ApplianceVersionOrBuilder> getNewDeployableApplianceFieldBuilder() { if (newDeployableApplianceBuilder_ == null) { newDeployableApplianceBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.vmmigration.v1.ApplianceVersion, com.google.cloud.vmmigration.v1.ApplianceVersion.Builder, com.google.cloud.vmmigration.v1.ApplianceVersionOrBuilder>( getNewDeployableAppliance(), getParentForChildren(), isClean()); newDeployableAppliance_ = null; } return newDeployableApplianceBuilder_; } private com.google.cloud.vmmigration.v1.ApplianceVersion inPlaceUpdate_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.vmmigration.v1.ApplianceVersion, com.google.cloud.vmmigration.v1.ApplianceVersion.Builder, com.google.cloud.vmmigration.v1.ApplianceVersionOrBuilder> inPlaceUpdateBuilder_; /** * * * <pre> * The latest version for in place update. * The current appliance can be updated to this version using the API or m4c * CLI. 
* </pre> * * <code>.google.cloud.vmmigration.v1.ApplianceVersion in_place_update = 2;</code> * * @return Whether the inPlaceUpdate field is set. */ public boolean hasInPlaceUpdate() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The latest version for in place update. * The current appliance can be updated to this version using the API or m4c * CLI. * </pre> * * <code>.google.cloud.vmmigration.v1.ApplianceVersion in_place_update = 2;</code> * * @return The inPlaceUpdate. */ public com.google.cloud.vmmigration.v1.ApplianceVersion getInPlaceUpdate() { if (inPlaceUpdateBuilder_ == null) { return inPlaceUpdate_ == null ? com.google.cloud.vmmigration.v1.ApplianceVersion.getDefaultInstance() : inPlaceUpdate_; } else { return inPlaceUpdateBuilder_.getMessage(); } } /** * * * <pre> * The latest version for in place update. * The current appliance can be updated to this version using the API or m4c * CLI. * </pre> * * <code>.google.cloud.vmmigration.v1.ApplianceVersion in_place_update = 2;</code> */ public Builder setInPlaceUpdate(com.google.cloud.vmmigration.v1.ApplianceVersion value) { if (inPlaceUpdateBuilder_ == null) { if (value == null) { throw new NullPointerException(); } inPlaceUpdate_ = value; } else { inPlaceUpdateBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The latest version for in place update. * The current appliance can be updated to this version using the API or m4c * CLI. * </pre> * * <code>.google.cloud.vmmigration.v1.ApplianceVersion in_place_update = 2;</code> */ public Builder setInPlaceUpdate( com.google.cloud.vmmigration.v1.ApplianceVersion.Builder builderForValue) { if (inPlaceUpdateBuilder_ == null) { inPlaceUpdate_ = builderForValue.build(); } else { inPlaceUpdateBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The latest version for in place update. 
* The current appliance can be updated to this version using the API or m4c * CLI. * </pre> * * <code>.google.cloud.vmmigration.v1.ApplianceVersion in_place_update = 2;</code> */ public Builder mergeInPlaceUpdate(com.google.cloud.vmmigration.v1.ApplianceVersion value) { if (inPlaceUpdateBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && inPlaceUpdate_ != null && inPlaceUpdate_ != com.google.cloud.vmmigration.v1.ApplianceVersion.getDefaultInstance()) { getInPlaceUpdateBuilder().mergeFrom(value); } else { inPlaceUpdate_ = value; } } else { inPlaceUpdateBuilder_.mergeFrom(value); } if (inPlaceUpdate_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * The latest version for in place update. * The current appliance can be updated to this version using the API or m4c * CLI. * </pre> * * <code>.google.cloud.vmmigration.v1.ApplianceVersion in_place_update = 2;</code> */ public Builder clearInPlaceUpdate() { bitField0_ = (bitField0_ & ~0x00000002); inPlaceUpdate_ = null; if (inPlaceUpdateBuilder_ != null) { inPlaceUpdateBuilder_.dispose(); inPlaceUpdateBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * The latest version for in place update. * The current appliance can be updated to this version using the API or m4c * CLI. * </pre> * * <code>.google.cloud.vmmigration.v1.ApplianceVersion in_place_update = 2;</code> */ public com.google.cloud.vmmigration.v1.ApplianceVersion.Builder getInPlaceUpdateBuilder() { bitField0_ |= 0x00000002; onChanged(); return getInPlaceUpdateFieldBuilder().getBuilder(); } /** * * * <pre> * The latest version for in place update. * The current appliance can be updated to this version using the API or m4c * CLI. 
* </pre> * * <code>.google.cloud.vmmigration.v1.ApplianceVersion in_place_update = 2;</code> */ public com.google.cloud.vmmigration.v1.ApplianceVersionOrBuilder getInPlaceUpdateOrBuilder() { if (inPlaceUpdateBuilder_ != null) { return inPlaceUpdateBuilder_.getMessageOrBuilder(); } else { return inPlaceUpdate_ == null ? com.google.cloud.vmmigration.v1.ApplianceVersion.getDefaultInstance() : inPlaceUpdate_; } } /** * * * <pre> * The latest version for in place update. * The current appliance can be updated to this version using the API or m4c * CLI. * </pre> * * <code>.google.cloud.vmmigration.v1.ApplianceVersion in_place_update = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.vmmigration.v1.ApplianceVersion, com.google.cloud.vmmigration.v1.ApplianceVersion.Builder, com.google.cloud.vmmigration.v1.ApplianceVersionOrBuilder> getInPlaceUpdateFieldBuilder() { if (inPlaceUpdateBuilder_ == null) { inPlaceUpdateBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.vmmigration.v1.ApplianceVersion, com.google.cloud.vmmigration.v1.ApplianceVersion.Builder, com.google.cloud.vmmigration.v1.ApplianceVersionOrBuilder>( getInPlaceUpdate(), getParentForChildren(), isClean()); inPlaceUpdate_ = null; } return inPlaceUpdateBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.vmmigration.v1.AvailableUpdates) } // @@protoc_insertion_point(class_scope:google.cloud.vmmigration.v1.AvailableUpdates) private static final com.google.cloud.vmmigration.v1.AvailableUpdates DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.vmmigration.v1.AvailableUpdates(); } public static 
com.google.cloud.vmmigration.v1.AvailableUpdates getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<AvailableUpdates> PARSER = new com.google.protobuf.AbstractParser<AvailableUpdates>() { @java.lang.Override public AvailableUpdates parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<AvailableUpdates> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<AvailableUpdates> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.vmmigration.v1.AvailableUpdates getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/maven-plugins
36,518
maven-assembly-plugin/src/test/java/org/apache/maven/plugins/assembly/utils/AssemblyFormatUtilsTest.java
package org.apache.maven.plugins.assembly.utils; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import junit.framework.TestCase; import org.apache.maven.execution.MavenSession; import org.apache.maven.model.Build; import org.apache.maven.model.Model; import org.apache.maven.plugins.assembly.AssemblerConfigurationSource; import org.apache.maven.plugins.assembly.archive.DefaultAssemblyArchiverTest; import org.apache.maven.plugins.assembly.archive.task.testutils.ArtifactMock; import org.apache.maven.plugins.assembly.format.AssemblyFormattingException; import org.apache.maven.plugins.assembly.model.Assembly; import org.apache.maven.project.MavenProject; import org.easymock.classextension.EasyMockSupport; import java.util.Properties; import static org.easymock.EasyMock.expect; public class AssemblyFormatUtilsTest extends TestCase { private final EasyMockSupport mockManager = new EasyMockSupport(); public void testFixRelativePathRefs_ShouldRemoveRelativeRefToCurrentDir() throws AssemblyFormattingException { assertEquals( "path/", AssemblyFormatUtils.fixRelativeRefs( "./path/" ) ); } public void testFixRelativePathRefs_ShouldRemoveEmbeddedSameDirRef() throws AssemblyFormattingException { assertEquals( "some/path/", 
AssemblyFormatUtils.fixRelativeRefs( "some/./path/" ) ); assertEquals( "some\\path\\", AssemblyFormatUtils.fixRelativeRefs( "some\\.\\path\\" ) ); } public void testFixRelativePathRefs_ShouldRemoveEmbeddedParentDirRef() throws AssemblyFormattingException { assertEquals( "path/", AssemblyFormatUtils.fixRelativeRefs( "some/../path/" ) ); } public void testFixRelativePathRefs_ShouldTruncateRelativeRefToParentDir() throws AssemblyFormattingException { assertEquals( "path/", AssemblyFormatUtils.fixRelativeRefs( "../path/" ) ); } public void testGetDistroName_ShouldUseJustFinalNameWithNoAppendAssemblyIdOrClassifier() { verifyDistroName( "assembly", "finalName", false, "finalName" ); } public void testGetDistroName_ShouldUseFinalNamePlusAssemblyIdIsNull() { verifyDistroName( "assembly", "finalName", true, "finalName-assembly" ); } public void testGetOutputDir_ShouldResolveGroupIdInOutDir_UseArtifactInfo() throws AssemblyFormattingException { verifyOutputDirUsingArtifactProject( "${artifact.groupId}", null, "group", null, null, null, null, "group/" ); } public void testGetOutputDir_ShouldResolveArtifactIdInOutDir_UseArtifactInfo() throws AssemblyFormattingException { verifyOutputDirUsingArtifactProject( "${artifact.artifactId}", null, null, "artifact", null, null, null, "artifact/" ); } public void testGetOutputDir_ShouldResolveVersionInOutDir_UseArtifactInfo() throws AssemblyFormattingException { verifyOutputDirUsingArtifactProject( "${artifact.version}", null, null, null, "version", null, null, "version/" ); } public void testGetOutputDir_ShouldResolveBuildFinalNameInOutDir_UseArtifactInfo() throws AssemblyFormattingException { verifyOutputDirUsingArtifactProject( "${artifact.build.finalName}", null, null, null, null, "finalName", null, "finalName/" ); } public void testGetOutputDir_ShouldResolveGroupIdInOutDir_UseModuleInfo() throws AssemblyFormattingException { verifyOutputDirUsingModuleProject( "${module.groupId}", null, "group", null, null, null, null, "group/" ); } 
public void testGetOutputDir_ShouldResolveArtifactIdInOutDir_UseModuleInfo() throws AssemblyFormattingException { verifyOutputDirUsingModuleProject( "${module.artifactId}", null, null, "artifact", null, null, null, "artifact/" ); } public void testGetOutputDir_ShouldResolveVersionInOutDir_UseModuleInfo() throws AssemblyFormattingException { verifyOutputDirUsingModuleProject( "${module.version}", null, null, null, "version", null, null, "version/" ); } public void testGetOutputDir_ShouldResolveBuildFinalNameInOutDir_UseModuleInfo() throws AssemblyFormattingException { verifyOutputDirUsingModuleProject( "${module.build.finalName}", null, null, null, null, "finalName", null, "finalName/" ); } public void testGetOutputDir_ShouldResolveGroupIdInOutDir_UseExplicitMainProject() throws AssemblyFormattingException { verifyOutputDirUsingMainProject( "${pom.groupId}", null, "group", null, null, null, null, "group/" ); } public void testGetOutputDir_ShouldResolveArtifactIdInOutDir_UseExplicitMainProject() throws AssemblyFormattingException { verifyOutputDirUsingMainProject( "${pom.artifactId}", null, null, "artifact", null, null, null, "artifact/" ); } public void testGetOutputDir_ShouldResolveVersionInOutDir_UseExplicitMainProject() throws AssemblyFormattingException { verifyOutputDirUsingMainProject( "${pom.version}", null, null, null, "version", null, null, "version/" ); } public void testGetOutputDir_ShouldResolveBuildFinalNameInOutDir_UseExplicitMainProject() throws AssemblyFormattingException { verifyOutputDirUsingMainProject( "${pom.build.finalName}", null, null, null, null, "finalName", null, "finalName/" ); } public void testGetOutputDir_ShouldResolveGroupIdInOutDir_UseExplicitMainProject_projectRef() throws AssemblyFormattingException { verifyOutputDirUsingMainProject( "${project.groupId}", null, "group", null, null, null, null, "group/" ); } public void testGetOutputDir_ShouldResolveArtifactIdInOutDir_UseExplicitMainProject_projectRef() throws 
AssemblyFormattingException { verifyOutputDirUsingMainProject( "${project.artifactId}", null, null, "artifact", null, null, null, "artifact/" ); } public void testGetOutputDir_ShouldResolveVersionInOutDir_UseExplicitMainProject_projectRef() throws AssemblyFormattingException { verifyOutputDirUsingMainProject( "${project.version}", null, null, null, "version", null, null, "version/" ); } public void testGetOutputDir_ShouldResolveBuildFinalNameInOutDir_UseExplicitMainProject_projectRef() throws AssemblyFormattingException { verifyOutputDir( "${project.build.finalName}", null, "finalName", "finalName/" ); } public void testGetOutputDir_ShouldNotAlterOutDirWhenIncludeBaseFalseAndNoExpressions() throws AssemblyFormattingException { verifyOutputDir( "dir/", "finalName", null, "dir/" ); } public void testGetOutputDir_ShouldNotAlterOutDirWhenIncludeBaseFalseAndNoExpressions_CheckWithBackslash() throws AssemblyFormattingException { verifyOutputDir( "dir\\", "finalName", null, "dir\\" ); } public void testGetOutputDir_ShouldAppendSlashToOutDirWhenMissingAndIncludeBaseFalseAndNoExpressions() throws AssemblyFormattingException { verifyOutputDir( "dir", "finalName", null, "dir/" ); } public void testGetOutputDir_ShouldResolveGroupIdInOutDir() throws AssemblyFormattingException { verifyOutputDirUsingMainProject( "${groupId}", "finalName", "group", null, null, null, null, "group/" ); } public void testGetOutputDir_ShouldResolveArtifactIdInOutDir() throws AssemblyFormattingException { verifyOutputDirUsingMainProject( "${artifactId}", "finalName", null, "artifact", null, null, null, "artifact/" ); } public void testGetOutputDir_ShouldResolveVersionInOutDir() throws AssemblyFormattingException { verifyOutputDirUsingMainProject( "${version}", "finalName", null, null, "version", null, null, "version/" ); } public void testGetOutputDir_ShouldResolveVersionInLargerOutDirExpr() throws AssemblyFormattingException { verifyOutputDirUsingMainProject( "my-special-${version}", "finalName", 
null, null, "99", null, null, "my-special-99/" ); } public void testGetOutputDir_ShouldResolveFinalNameInOutDir() throws AssemblyFormattingException { verifyOutputDir( "${finalName}", "finalName", null, "finalName/" ); } public void testGetOutputDir_ShouldResolveBuildFinalNameInOutDir() throws AssemblyFormattingException { verifyOutputDir( "${build.finalName}", "finalName", null, "finalName/" ); } public void testGetOutputDir_ShouldReturnEmptyPathWhenAllInputIsEmptyAndIncludeBaseFalse() throws AssemblyFormattingException { verifyOutputDir( null, null, null, "" ); } public void testGetOutputDir_ShouldRemoveRelativeRefToCurrentDir() throws AssemblyFormattingException { verifyOutputDir( "./path/", null, null, "path/" ); } public void testGetOutputDir_ShouldRemoveEmbeddedSameDirRef() throws AssemblyFormattingException { verifyOutputDir( "some/./path/", null, null, "some/path/" ); } public void testGetOutputDir_ShouldRemoveEmbeddedParentDirRef() throws AssemblyFormattingException { verifyOutputDir( "some/../path/", null, null, "path/" ); } public void testGetOutputDir_ShouldTruncateRelativeRefToParentDir() throws AssemblyFormattingException { verifyOutputDir( "../path/", null, null, "path/" ); } public void testGetOutputDir_ShouldResolveProjectProperty() throws AssemblyFormattingException { final Properties props = new Properties(); props.setProperty( "myProperty", "value" ); verifyOutputDirUsingMainProject( "file.${myProperty}", null, null, null, null, null, props, "file.value/" ); } public void testGetOutputDir_ShouldResolveProjectPropertyAltExpr() throws AssemblyFormattingException { final Properties props = new Properties(); props.setProperty( "myProperty", "value" ); verifyOutputDirUsingMainProject( "file.${pom.properties.myProperty}", null, null, null, null, null, props, "file.value/" ); } public void testEvalFileNameMapping_ShouldResolveArtifactIdAndBaseVersionInOutDir_UseArtifactInfo_WithValidMainProject() throws AssemblyFormattingException { final MavenProject 
mainProject = createProject( "group", "main", "1", null ); final String artifactVersion = "2-20070807.112233-1"; final String artifactBaseVersion = "2-SNAPSHOT"; final MavenProject artifactProject = createProject( "group", "artifact", artifactVersion, null ); final ArtifactMock artifactMock = new ArtifactMock( mockManager, "group", "artifact", artifactVersion, "jar", true, artifactBaseVersion ); artifactProject.setArtifact( artifactMock.getArtifact() ); final MavenSession session = mockManager.createMock( MavenSession.class ); expect( session.getExecutionProperties() ).andReturn( null ).anyTimes(); expect( session.getUserProperties() ).andReturn( new Properties() ).anyTimes(); final AssemblerConfigurationSource cs = mockManager.createMock( AssemblerConfigurationSource.class ); expect( cs.getMavenSession() ).andReturn( session ).anyTimes(); DefaultAssemblyArchiverTest.setupInterpolators( cs ); mockManager.replayAll(); final String result = AssemblyFormatUtils.evaluateFileNameMapping( "${artifact.artifactId}-${artifact.baseVersion}", artifactMock.getArtifact(), mainProject, null, cs, AssemblyFormatUtils.moduleProjectInterpolator( null ), AssemblyFormatUtils.artifactProjectInterpolator( artifactProject ) ); /* final Artifact artifact = artifactMock.getArtifact(); final String result = AssemblyFormatUtils.evaluateFileNameMapping( "${artifact.artifactId}-${artifact.baseVersion}", moduleArtifactInterpolator( null ), moduleProjectInterpolator( null ), artifactInterpolator( artifact ), artifactProjectInterpolator( artifactProject ), mainArtifactPropsOnly( mainProject ), classifierRules( artifact ), FixedStringSearchInterpolator.empty() ); */ assertEquals( "artifact-2-SNAPSHOT", result ); mockManager.verifyAll(); // clear out for next call. 
mockManager.resetAll(); } public void testEvalFileNameMapping_ShouldResolveGroupIdInOutDir_UseArtifactInfo() throws AssemblyFormattingException { verifyEvalFileNameMappingUsingArtifactProject( "${artifact.groupId}", null, "group", null, null, null, "group", null ); } public void testEvalFileNameMapping_ShouldResolveArtifactIdInOutDir_UseArtifactInfo() throws AssemblyFormattingException { verifyEvalFileNameMappingUsingArtifactProject( "${artifact.artifactId}", null, null, "artifact", null, null, "artifact", null ); } public void testEvalFileNameMapping_ShouldResolveVersionInOutDir_UseArtifactInfo() throws AssemblyFormattingException { verifyEvalFileNameMappingUsingArtifactProject( "${artifact.version}", null, null, null, "version", null, "version", null ); } public void testEvalFileNameMapping_ShouldResolveGroupIdInOutDir_UseArtifactInfoAndModulePrefix() throws AssemblyFormattingException { verifyEvalFileNameMappingUsingModuleProject( "${module.groupId}", null, "group", null, null, null, "group", null ); } public void testEvalFileNameMapping_ShouldResolveArtifactIdInOutDir_UseArtifactInfoAndModulePrefix() throws AssemblyFormattingException { verifyEvalFileNameMappingUsingModuleProject( "${module.artifactId}", null, null, "artifact", null, null, "artifact", null ); } public void testEvalFileNameMapping_ShouldResolveVersionInOutDir_UseArtifactInfoAndModulePrefix() throws AssemblyFormattingException { verifyEvalFileNameMappingUsingModuleProject( "${module.version}", null, null, null, "version", null, "version", null ); } public void testEvalFileNameMapping_ShouldResolveGroupIdInOutDir_UseExplicitMainProject() throws AssemblyFormattingException { verifyEvalFileNameMappingUsingMainProject( "${pom.groupId}", null, "group", null, null, null, "group", null ); } public void testEvalFileNameMapping_ShouldResolveArtifactIdInOutDir_UseExplicitMainProject() throws AssemblyFormattingException { verifyEvalFileNameMappingUsingMainProject( "${pom.artifactId}", null, null, 
"artifact", null, null, "artifact", null ); } public void testEvalFileNameMapping_ShouldResolveVersionInOutDir_UseExplicitMainProject() throws AssemblyFormattingException { verifyEvalFileNameMappingUsingMainProject( "${pom.version}", null, null, null, "version", null, "version", null ); } public void testEvalFileNameMapping_ShouldResolveGroupIdInOutDir_UseExplicitMainProject_projectRef() throws AssemblyFormattingException { verifyEvalFileNameMappingUsingMainProject( "${project.groupId}", null, "group", null, null, null, "group", null ); } public void testEvalFileNameMapping_ShouldResolveArtifactIdInOutDir_UseExplicitMainProject_projectRef() throws AssemblyFormattingException { verifyEvalFileNameMappingUsingMainProject( "${project.artifactId}", null, null, "artifact", null, null, "artifact", null ); } public void testEvalFileNameMapping_ShouldResolveVersionInOutDir_UseExplicitMainProject_projectRef() throws AssemblyFormattingException { verifyEvalFileNameMappingUsingMainProject( "${project.version}", null, null, null, "version", null, "version", null ); } public void testEvalFileNameMapping_ShouldRemoveRelativeRefToCurrentDir() throws AssemblyFormattingException { verifyEvalFileNameMappingUsingMainProject( "./path/", null, null, null, null, null, "path/", null ); } public void testEvalFileNameMapping_ShouldRemoveEmbeddedSameDirRef() throws AssemblyFormattingException { verifyEvalFileNameMappingUsingMainProject( "some/./path/", null, null, null, null, null, "some/path/", null ); } public void testEvalFileNameMapping_ShouldRemoveEmbeddedParentDirRef() throws AssemblyFormattingException { verifyEvalFileNameMappingUsingMainProject( "some/../path/", null, null, null, null, null, "path/", null ); } public void testEvalFileNameMapping_ShouldTruncateRelativeRefToParentDir() throws AssemblyFormattingException { verifyEvalFileNameMappingUsingMainProject( "../path/", null, null, null, null, null, "path/", null ); } public void 
testEvalFileNameMapping_ShouldPassExpressionThroughUnchanged() throws AssemblyFormattingException { verifyEvalFileNameMapping( "filename", null, null, "filename", null ); } public void testEvalFileNameMapping_ShouldInsertClassifierAheadOfExtension() throws AssemblyFormattingException { verifyEvalFileNameMapping( "filename-${artifact.classifier}.ext", "classifier", null, "filename-classifier.ext", null ); } public void testEvalFileNameMapping_ShouldAppendDashClassifierWhenClassifierPresent() throws AssemblyFormattingException { verifyEvalFileNameMapping( "filename${dashClassifier?}", "classifier", null, "filename-classifier", null ); } public void testEvalFileNameMapping_ShouldNotAppendDashClassifierWhenClassifierMissing() throws AssemblyFormattingException { verifyEvalFileNameMapping( "filename${dashClassifier?}", null, null, "filename", null ); } public void testEvalFileNameMapping_ShouldNotAppendDashClassifierWhenClassifierEmpty() throws AssemblyFormattingException { verifyEvalFileNameMapping( "filename${dashClassifier?}", "", null, "filename", null ); } public void testEvalFileNameMapping_ShouldResolveGroupId() throws AssemblyFormattingException { verifyEvalFileNameMappingUsingMainProject( "${groupId}", null, "group", null, null, null, "group", null ); } public void testEvalFileNameMapping_ShouldResolveArtifactId() throws AssemblyFormattingException { verifyEvalFileNameMappingUsingMainProject( "${artifactId}", null, null, "artifact", null, null, "artifact", null ); } public void testEvalFileNameMapping_ShouldResolveVersion() throws AssemblyFormattingException { verifyEvalFileNameMappingUsingMainProject( "${version}", null, null, null, "version", null, "version", null ); } public void testEvalFileNameMapping_ShouldResolveExtension() throws AssemblyFormattingException { verifyEvalFileNameMapping( "file.${artifact.extension}", null, "ext", "file.ext", null ); } public void testEvalFileNameMapping_ShouldResolveProjectProperty() throws AssemblyFormattingException { 
final Properties props = new Properties(); props.setProperty( "myProperty", "value" ); verifyEvalFileNameMapping( "file.${myProperty}", null, null, "file.value", props ); } public void testEvalFileNameMapping_ShouldResolveProjectPropertyAltExpr() throws AssemblyFormattingException { final Properties props = new Properties(); props.setProperty( "myProperty", "value" ); verifyEvalFileNameMapping( "file.${pom.properties.myProperty}", null, null, "file.value", props ); } public void testEvalFileNameMapping_ShouldResolveSystemPropertyWithoutMainProjectPresent() throws AssemblyFormattingException { verifyEvalFileNameMapping( "file.${java.version}", null, null, "file." + System.getProperty( "java.version" ), null ); } private void verifyEvalFileNameMapping( final String expression, final String classifier, final String extension, final String checkValue, final Properties projectProperties ) throws AssemblyFormattingException { verifyEvalFileNameMappingUsingMainProject( expression, classifier, null, null, null, extension, checkValue, projectProperties ); } private void verifyEvalFileNameMappingUsingMainProject( final String expression, final String classifier, final String groupId, final String artifactId, final String version, final String extension, final String checkValue, final Properties projectProperties ) throws AssemblyFormattingException { final MavenProject mainProject = createProject( groupId, artifactId, version, projectProperties ); final MavenProject artifactProject = createProject( "unknown", "unknown", "unknown", null ); final MavenProject moduleProject = createProject( "unknown", "unknown", "unknown", null ); verifyEvalFileNameMapping( expression, classifier, extension, mainProject, moduleProject, artifactProject, checkValue ); } private void verifyEvalFileNameMappingUsingArtifactProject( final String expression, final String classifier, final String groupId, final String artifactId, final String version, final String extension, final String checkValue, 
final Properties projectProperties ) throws AssemblyFormattingException { final MavenProject artifactProject = createProject( groupId, artifactId, version, projectProperties ); final MavenProject mainProject = createProject( "unknown", "unknown", "unknown", null ); final MavenProject moduleProject = createProject( "unknown", "unknown", "unknown", null ); verifyEvalFileNameMapping( expression, classifier, extension, mainProject, moduleProject, artifactProject, checkValue ); } private void verifyEvalFileNameMappingUsingModuleProject( final String expression, final String classifier, final String groupId, final String artifactId, final String version, final String extension, final String checkValue, final Properties projectProperties ) throws AssemblyFormattingException { final MavenProject moduleProject = createProject( groupId, artifactId, version, projectProperties ); final MavenProject mainProject = createProject( "unknown", "unknown", "unknown", null ); final MavenProject artifactProject = createProject( "unknown", "unknown", "unknown", null ); verifyEvalFileNameMapping( expression, classifier, extension, mainProject, moduleProject, artifactProject, checkValue ); } private MavenProject createProject( String groupId, String artifactId, String version, final Properties projectProperties ) { if ( artifactId == null ) { artifactId = "artifact"; } if ( groupId == null ) { groupId = "group"; } if ( version == null ) { version = "version"; } final Model model = new Model(); model.setGroupId( groupId ); model.setArtifactId( artifactId ); model.setVersion( version ); model.setProperties( projectProperties ); return new MavenProject( model ); } private void verifyEvalFileNameMapping( final String expression, final String classifier, final String extension, final MavenProject mainProject, final MavenProject moduleProject, final MavenProject artifactProject, final String checkValue ) throws AssemblyFormattingException { final ArtifactMock artifactMock = new ArtifactMock( 
mockManager, artifactProject.getGroupId(), artifactProject.getArtifactId(), artifactProject.getVersion(), extension, classifier, false, null ); final ArtifactMock moduleArtifactMock = new ArtifactMock( mockManager, moduleProject.getGroupId(), moduleProject.getArtifactId(), moduleProject.getVersion(), "jar", false, null ); final MavenSession session = mockManager.createMock( MavenSession.class ); expect( session.getExecutionProperties() ).andReturn( System.getProperties() ).anyTimes(); expect( session.getUserProperties() ).andReturn( new Properties() ).anyTimes(); final AssemblerConfigurationSource cs = mockManager.createMock( AssemblerConfigurationSource.class ); expect( cs.getMavenSession() ).andReturn( session ).anyTimes(); DefaultAssemblyArchiverTest.setupInterpolators( cs, mainProject ); mockManager.replayAll(); final String result = AssemblyFormatUtils.evaluateFileNameMapping( expression, artifactMock.getArtifact(), mainProject, moduleArtifactMock.getArtifact(), cs, AssemblyFormatUtils.moduleProjectInterpolator( moduleProject ), AssemblyFormatUtils.artifactProjectInterpolator( artifactProject ) ); /* final String result = AssemblyFormatUtils.evaluateFileNameMapping( expression, moduleArtifactInterpolator( moduleArtifactMock.getArtifact() ), moduleProjectInterpolator( moduleProject ), artifactInterpolator( artifactMock.getArtifact() ), artifactProjectInterpolator( artifactProject ), mainArtifactPropsOnly( mainProject ), classifierRules( artifactMock.getArtifact() ), FixedStringSearchInterpolator.create( new PropertiesBasedValueSource( System.getProperties() )) ); */ assertEquals( checkValue, result ); mockManager.verifyAll(); // clear out for next call. 
mockManager.resetAll(); } private void verifyOutputDir( final String outDir, final String finalName, final String projectFinalName, final String checkValue ) throws AssemblyFormattingException { verifyOutputDirUsingMainProject( outDir, finalName, null, null, null, projectFinalName, null, checkValue ); } private void verifyOutputDirUsingMainProject( final String outDir, final String finalName, final String groupId, final String artifactId, final String version, final String projectFinalName, final Properties properties, final String checkValue ) throws AssemblyFormattingException { final MavenProject project = createProject( groupId, artifactId, version, properties ); if ( projectFinalName != null ) { final Build build = new Build(); build.setFinalName( projectFinalName ); project.getModel().setBuild( build ); } final MavenProject moduleProject = createProject( "unknown", "unknown", "unknown", null ); final MavenProject artifactProject = createProject( "unknown", "unknown", "unknown", null ); verifyOutputDir( outDir, finalName, project, moduleProject, artifactProject, checkValue ); } private void verifyOutputDirUsingModuleProject( final String outDir, final String finalName, final String groupId, final String artifactId, final String version, final String projectFinalName, final Properties properties, final String checkValue ) throws AssemblyFormattingException { final MavenProject project = createProject( groupId, artifactId, version, properties ); if ( projectFinalName != null ) { final Build build = new Build(); build.setFinalName( projectFinalName ); project.getModel().setBuild( build ); } final MavenProject mainProject = createProject( "unknown", "unknown", "unknown", null ); final MavenProject artifactProject = createProject( "unknown", "unknown", "unknown", null ); verifyOutputDir( outDir, finalName, mainProject, project, artifactProject, checkValue ); } private void verifyOutputDirUsingArtifactProject( final String outDir, final String finalName, final 
String groupId, final String artifactId, final String version, final String projectFinalName, final Properties properties, final String checkValue ) throws AssemblyFormattingException { final MavenProject project = createProject( groupId, artifactId, version, properties ); if ( projectFinalName != null ) { final Build build = new Build(); build.setFinalName( projectFinalName ); project.getModel().setBuild( build ); } final MavenProject moduleProject = createProject( "unknown", "unknown", "unknown", null ); final MavenProject mainProject = createProject( "unknown", "unknown", "unknown", null ); verifyOutputDir( outDir, finalName, mainProject, moduleProject, project, checkValue ); } private void verifyOutputDir( final String outDir, final String finalName, final MavenProject mainProject, final MavenProject moduleProject, final MavenProject artifactProject, final String checkValue ) throws AssemblyFormattingException { final MavenSession session = mockManager.createMock( MavenSession.class ); expect( session.getExecutionProperties() ).andReturn( System.getProperties() ).anyTimes(); expect( session.getUserProperties() ).andReturn( new Properties() ).anyTimes(); final AssemblerConfigurationSource cs = mockManager.createMock( AssemblerConfigurationSource.class ); expect( cs.getMavenSession() ).andReturn( session ).anyTimes(); DefaultAssemblyArchiverTest.setupInterpolators( cs, mainProject ); String result; mockManager.replayAll(); result = AssemblyFormatUtils.getOutputDirectory( outDir, finalName, cs, AssemblyFormatUtils.moduleProjectInterpolator( moduleProject ), AssemblyFormatUtils.artifactProjectInterpolator( artifactProject ) ); assertEquals( checkValue, result ); mockManager.verifyAll(); mockManager.resetAll(); } private void verifyDistroName( final String assemblyId, final String finalName, final boolean appendAssemblyId, final String checkValue ) { final MockAndControlForGetDistroName mac = new MockAndControlForGetDistroName( finalName, appendAssemblyId ); 
mockManager.replayAll(); final Assembly assembly = new Assembly(); assembly.setId( assemblyId ); final String result = AssemblyFormatUtils.getDistributionName( assembly, mac.configSource ); assertEquals( checkValue, result ); mockManager.verifyAll(); // clear it out for the next call. mockManager.resetAll(); } public void testWindowsPath() { assertTrue( AssemblyFormatUtils.isWindowsPath( "C:\foobar" ) ); } public void testLinuxRootReferencePath() { assertTrue( AssemblyFormatUtils.isLinuxRootReference( "/etc/home" ) ); } private final class MockAndControlForGetDistroName { final AssemblerConfigurationSource configSource; private final boolean isAssemblyIdAppended; private final String finalName; public MockAndControlForGetDistroName( final String finalName, final boolean isAssemblyIdAppended ) { this.finalName = finalName; this.isAssemblyIdAppended = isAssemblyIdAppended; configSource = mockManager.createMock( AssemblerConfigurationSource.class ); enableExpectations(); } private void enableExpectations() { expect( configSource.isAssemblyIdAppended() ).andReturn( isAssemblyIdAppended ).atLeastOnce(); expect( configSource.getFinalName() ).andReturn( finalName ).atLeastOnce(); } } }
apache/seatunnel
36,917
seatunnel-engine/seatunnel-engine-client/src/test/java/org/apache/seatunnel/engine/client/SeaTunnelClientTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.seatunnel.engine.client; import org.apache.seatunnel.shade.com.fasterxml.jackson.core.JsonProcessingException; import org.apache.seatunnel.shade.com.fasterxml.jackson.databind.JsonNode; import org.apache.seatunnel.shade.com.fasterxml.jackson.databind.ObjectMapper; import org.apache.seatunnel.shade.org.apache.commons.lang3.StringUtils; import org.apache.seatunnel.common.config.Common; import org.apache.seatunnel.common.config.DeployMode; import org.apache.seatunnel.common.utils.RetryUtils; import org.apache.seatunnel.engine.client.job.ClientJobExecutionEnvironment; import org.apache.seatunnel.engine.client.job.ClientJobProxy; import org.apache.seatunnel.engine.client.job.JobClient; import org.apache.seatunnel.engine.common.Constant; import org.apache.seatunnel.engine.common.config.ConfigProvider; import org.apache.seatunnel.engine.common.config.JobConfig; import org.apache.seatunnel.engine.common.config.SeaTunnelConfig; import org.apache.seatunnel.engine.common.job.JobStatus; import org.apache.seatunnel.engine.common.utils.concurrent.CompletableFuture; import org.apache.seatunnel.engine.core.dag.logical.LogicalDag; import org.apache.seatunnel.engine.core.job.JobDAGInfo; import 
org.apache.seatunnel.engine.server.SeaTunnelNodeContext; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.condition.DisabledOnOs; import org.junit.jupiter.api.condition.OS; import org.junitpioneer.jupiter.SetEnvironmentVariable; import com.hazelcast.client.config.ClientConfig; import com.hazelcast.core.HazelcastInstance; import com.hazelcast.instance.impl.HazelcastInstanceFactory; import lombok.extern.slf4j.Slf4j; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.Spliterators; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import java.util.stream.StreamSupport; import static org.apache.seatunnel.api.common.metrics.MetricNames.SINK_WRITE_BYTES; import static org.apache.seatunnel.api.common.metrics.MetricNames.SINK_WRITE_BYTES_PER_SECONDS; import static org.apache.seatunnel.api.common.metrics.MetricNames.SINK_WRITE_COUNT; import static org.apache.seatunnel.api.common.metrics.MetricNames.SINK_WRITE_QPS; import static org.apache.seatunnel.api.common.metrics.MetricNames.SOURCE_RECEIVED_BYTES; import static org.apache.seatunnel.api.common.metrics.MetricNames.SOURCE_RECEIVED_BYTES_PER_SECONDS; import static org.apache.seatunnel.api.common.metrics.MetricNames.SOURCE_RECEIVED_COUNT; import static org.apache.seatunnel.api.common.metrics.MetricNames.SOURCE_RECEIVED_QPS; import static org.awaitility.Awaitility.await; @DisabledOnOs(OS.WINDOWS) @Slf4j public class SeaTunnelClientTest { private static SeaTunnelConfig SEATUNNEL_CONFIG = ConfigProvider.locateAndGetSeaTunnelConfig(); private static HazelcastInstance INSTANCE; @BeforeAll public static void beforeClass() throws Exception { SEATUNNEL_CONFIG .getHazelcastConfig() 
.setClusterName(ContentFormatUtilTest.getClusterName("SeaTunnelClientTest")); INSTANCE = HazelcastInstanceFactory.newHazelcastInstance( SEATUNNEL_CONFIG.getHazelcastConfig(), Thread.currentThread().getName(), new SeaTunnelNodeContext(ConfigProvider.locateAndGetSeaTunnelConfig())); } private SeaTunnelClient createSeaTunnelClient() { ClientConfig clientConfig = ConfigProvider.locateAndGetClientConfig(); clientConfig.setClusterName(ContentFormatUtilTest.getClusterName("SeaTunnelClientTest")); return new SeaTunnelClient(clientConfig); } @Test public void testSayHello() { String msg = "Hello world"; try (SeaTunnelClient seaTunnelClient = createSeaTunnelClient()) { String s = seaTunnelClient.printMessageToMaster(msg); Assertions.assertEquals(msg, s); } } @Test public void testExecuteJob() { Common.setDeployMode(DeployMode.CLIENT); String filePath = ContentFormatUtilTest.getResource("/client_test.conf"); JobConfig jobConfig = new JobConfig(); jobConfig.setName("testExecuteJob"); SeaTunnelClient seaTunnelClient = createSeaTunnelClient(); try { ClientJobExecutionEnvironment jobExecutionEnv = seaTunnelClient.createExecutionContext(filePath, jobConfig, SEATUNNEL_CONFIG); final ClientJobProxy clientJobProxy = jobExecutionEnv.execute(); CompletableFuture<JobStatus> objectCompletableFuture = CompletableFuture.supplyAsync( () -> { return clientJobProxy.waitForJobComplete(); }); await().atMost(180000, TimeUnit.MILLISECONDS) .untilAsserted( () -> Assertions.assertTrue( objectCompletableFuture.isDone() && JobStatus.FINISHED.equals( objectCompletableFuture.get()))); } catch (ExecutionException | InterruptedException e) { throw new RuntimeException(e); } finally { seaTunnelClient.close(); } } @Test public void testGetJobState() { Common.setDeployMode(DeployMode.CLIENT); String filePath = ContentFormatUtilTest.getResource("/client_test.conf"); JobConfig jobConfig = new JobConfig(); jobConfig.setName("testGetJobState"); SeaTunnelClient seaTunnelClient = createSeaTunnelClient(); 
JobClient jobClient = seaTunnelClient.getJobClient(); try { ClientJobExecutionEnvironment jobExecutionEnv = seaTunnelClient.createExecutionContext(filePath, jobConfig, SEATUNNEL_CONFIG); final ClientJobProxy clientJobProxy = jobExecutionEnv.execute(); long jobId = clientJobProxy.getJobId(); await().atMost(30000, TimeUnit.MILLISECONDS) .untilAsserted( () -> Assertions.assertTrue( jobClient.getJobDetailStatus(jobId).contains("RUNNING") && jobClient .listJobStatus(true) .contains("RUNNING"))); await().atMost(30000, TimeUnit.MILLISECONDS) .untilAsserted( () -> Assertions.assertTrue( jobClient.getJobDetailStatus(jobId).contains("FINISHED") && jobClient .listJobStatus(true) .contains("FINISHED"))); } catch (ExecutionException | InterruptedException e) { throw new RuntimeException(e); } finally { seaTunnelClient.close(); } } @Test public void testGetJobMetrics() { Common.setDeployMode(DeployMode.CLIENT); String filePath = ContentFormatUtilTest.getResource("/client_test.conf"); JobConfig jobConfig = new JobConfig(); jobConfig.setName("testGetJobMetrics"); SeaTunnelClient seaTunnelClient = createSeaTunnelClient(); JobClient jobClient = seaTunnelClient.getJobClient(); try { ClientJobExecutionEnvironment jobExecutionEnv = seaTunnelClient.createExecutionContext(filePath, jobConfig, SEATUNNEL_CONFIG); final ClientJobProxy clientJobProxy = jobExecutionEnv.execute(); long jobId = clientJobProxy.getJobId(); await().atMost(30000, TimeUnit.MILLISECONDS) .untilAsserted( () -> Assertions.assertTrue( jobClient.getJobDetailStatus(jobId).contains("FINISHED") && jobClient .listJobStatus(true) .contains("FINISHED"))); String jobMetrics = jobClient.getJobMetrics(jobId); log.info(jobMetrics); Assertions.assertTrue(jobMetrics.contains(SOURCE_RECEIVED_COUNT)); Assertions.assertTrue(jobMetrics.contains(SOURCE_RECEIVED_QPS)); Assertions.assertTrue(jobMetrics.contains(SINK_WRITE_COUNT)); Assertions.assertTrue(jobMetrics.contains(SINK_WRITE_QPS)); } catch (ExecutionException | InterruptedException 
e) { throw new RuntimeException(e); } finally { seaTunnelClient.close(); } } @Test public void testGetRunningJobMetrics() throws ExecutionException, InterruptedException { Common.setDeployMode(DeployMode.CLUSTER); String filePath = ContentFormatUtilTest.getResource("/batch_fake_to_console.conf"); JobConfig jobConfig = new JobConfig(); jobConfig.setName("fake_to_console1"); try (SeaTunnelClient seaTunnelClient = createSeaTunnelClient()) { JobClient jobClient = seaTunnelClient.getJobClient(); ClientJobProxy execute1 = seaTunnelClient .createExecutionContext(filePath, jobConfig, SEATUNNEL_CONFIG) .execute(); long jobId1 = execute1.getJobId(); execute1.waitForJobComplete(); filePath = ContentFormatUtilTest.getResource("streaming_fake_to_console.conf"); jobConfig = new JobConfig(); jobConfig.setName("fake_to_console2"); ClientJobProxy execute2 = seaTunnelClient .createExecutionContext(filePath, jobConfig, SEATUNNEL_CONFIG) .execute(); ClientJobProxy execute3 = seaTunnelClient .createExecutionContext(filePath, jobConfig, SEATUNNEL_CONFIG) .execute(); long jobId2 = execute2.getJobId(); long jobId3 = execute3.getJobId(); await().atMost(30000, TimeUnit.MILLISECONDS) .untilAsserted( () -> Assertions.assertTrue( jobClient.getJobStatus(jobId1).equals("FINISHED") && jobClient .getJobStatus(jobId2) .equals("RUNNING") && jobClient .getJobStatus(jobId3) .equals("RUNNING"))); log.info(jobClient.getRunningJobMetrics()); await().atMost(30000, TimeUnit.MILLISECONDS) .untilAsserted( () -> { String runningJobMetrics = jobClient.getRunningJobMetrics(); Assertions.assertTrue( runningJobMetrics.contains(jobId2 + "") && runningJobMetrics.contains(jobId3 + "")); }); jobClient.cancelJob(jobId2); jobClient.cancelJob(jobId3); } } @Test public void testCancelJob() throws ExecutionException, InterruptedException { Common.setDeployMode(DeployMode.CLIENT); String filePath = ContentFormatUtilTest.getResource("/streaming_fake_to_console.conf"); JobConfig jobConfig = new JobConfig(); 
jobConfig.setName("testCancelJob"); SeaTunnelClient seaTunnelClient = createSeaTunnelClient(); JobClient jobClient = seaTunnelClient.getJobClient(); try { ClientJobExecutionEnvironment jobExecutionEnv = seaTunnelClient.createExecutionContext(filePath, jobConfig, SEATUNNEL_CONFIG); final ClientJobProxy clientJobProxy = jobExecutionEnv.execute(); long jobId = clientJobProxy.getJobId(); await().atMost(30000, TimeUnit.MILLISECONDS) .untilAsserted( () -> Assertions.assertEquals( "RUNNING", jobClient.getJobStatus(jobId))); jobClient.cancelJob(jobId); await().atMost(30000, TimeUnit.MILLISECONDS) .untilAsserted( () -> Assertions.assertEquals( "CANCELED", jobClient.getJobStatus(jobId))); } catch (Exception e) { throw new RuntimeException(e); } finally { seaTunnelClient.close(); } } @Test public void testSetJobId() throws ExecutionException, InterruptedException { Common.setDeployMode(DeployMode.CLIENT); String filePath = ContentFormatUtilTest.getResource("/streaming_fake_to_console.conf"); JobConfig jobConfig = new JobConfig(); jobConfig.setName("testSetJobId"); long jobId = 12345; SeaTunnelClient seaTunnelClient = createSeaTunnelClient(); JobClient jobClient = seaTunnelClient.getJobClient(); try { ClientJobExecutionEnvironment jobExecutionEnv = seaTunnelClient.createExecutionContext( filePath, new ArrayList<>(), jobConfig, SEATUNNEL_CONFIG, jobId); final ClientJobProxy clientJobProxy = jobExecutionEnv.execute(); Assertions.assertEquals(jobId, clientJobProxy.getJobId()); await().atMost(30000, TimeUnit.MILLISECONDS) .untilAsserted( () -> Assertions.assertEquals( "RUNNING", jobClient.getJobStatus(jobId))); jobClient.cancelJob(jobId); await().atMost(30000, TimeUnit.MILLISECONDS) .untilAsserted( () -> Assertions.assertEquals( "CANCELED", jobClient.getJobStatus(jobId))); } catch (Exception e) { throw new RuntimeException(e); } finally { seaTunnelClient.close(); } } @Test public void testSetJobIdDuplicate() { Common.setDeployMode(DeployMode.CLIENT); String filePath = 
ContentFormatUtilTest.getResource("/streaming_fake_to_console.conf"); JobConfig jobConfig = new JobConfig(); jobConfig.setName("testSetJobId"); long jobId = System.currentTimeMillis(); SeaTunnelClient seaTunnelClient = createSeaTunnelClient(); JobClient jobClient = seaTunnelClient.getJobClient(); try { ClientJobExecutionEnvironment jobExecutionEnv = seaTunnelClient.createExecutionContext( filePath, new ArrayList<>(), jobConfig, SEATUNNEL_CONFIG, jobId); final ClientJobProxy clientJobProxy = jobExecutionEnv.execute(); Assertions.assertEquals(jobId, clientJobProxy.getJobId()); await().atMost(30000, TimeUnit.MILLISECONDS) .untilAsserted( () -> Assertions.assertEquals( "RUNNING", jobClient.getJobStatus(jobId))); jobClient.cancelJob(jobId); await().atMost(30000, TimeUnit.MILLISECONDS) .untilAsserted( () -> Assertions.assertEquals( "CANCELED", jobClient.getJobStatus(jobId))); ClientJobExecutionEnvironment jobExecutionEnvWithSameJobId = seaTunnelClient.createExecutionContext( filePath, new ArrayList<>(), jobConfig, SEATUNNEL_CONFIG, jobId); Exception exception = Assertions.assertThrows( Exception.class, () -> jobExecutionEnvWithSameJobId.execute().waitForJobCompleteV2()); Assertions.assertTrue( exception .getCause() .getMessage() .contains( String.format( "The job id %s has already been submitted and is not starting with a savepoint.", jobId))); } catch (Exception e) { throw new RuntimeException(e); } finally { seaTunnelClient.close(); } } @Test public void testGetJobInfo() { Common.setDeployMode(DeployMode.CLIENT); String filePath = ContentFormatUtilTest.getResource("/client_test.conf"); JobConfig jobConfig = new JobConfig(); jobConfig.setName("fake_to_console"); SeaTunnelClient seaTunnelClient = createSeaTunnelClient(); JobClient jobClient = seaTunnelClient.getJobClient(); try { ClientJobExecutionEnvironment jobExecutionEnv = seaTunnelClient.createExecutionContext(filePath, jobConfig, SEATUNNEL_CONFIG); final ClientJobProxy clientJobProxy = jobExecutionEnv.execute(); 
            // NOTE(review): the enclosing test method's signature is above this chunk.
            // The code below polls the submitted job until it reports FINISHED and then
            // verifies the job's DAG info serializes to a non-empty JSON string.
            long jobId = clientJobProxy.getJobId();
            // Wait until the job is visible to the client before polling status.
            await().atMost(10, TimeUnit.SECONDS)
                    .untilAsserted(
                            () -> {
                                Assertions.assertNotNull(jobClient.getJobInfo(jobId));
                            });
            // Up to 12 minutes for the batch job to finish; logs status each poll.
            await().atMost(720000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () -> {
                                // NOTE(review): Thread.sleep inside untilAsserted throttles
                                // polling; Awaitility's pollInterval would be the usual way.
                                Thread.sleep(1000);
                                log.info(
                                        "======================job status:"
                                                + jobClient.getJobDetailStatus(jobId));
                                log.info(
                                        "======================list job status:\n"
                                                + jobClient.listJobStatus(true));
                                Assertions.assertTrue(
                                        jobClient.getJobDetailStatus(jobId).contains("FINISHED")
                                                && jobClient
                                                        .listJobStatus(true)
                                                        .contains("FINISHED"));
                            });

            // Finished
            JobDAGInfo jobInfo = jobClient.getJobInfo(jobId);
            Assertions.assertTrue(
                    StringUtils.isNotEmpty(new ObjectMapper().writeValueAsString(jobInfo)));
        } catch (Exception e) {
            throw new RuntimeException(e);
        } finally {
            seaTunnelClient.close();
        }
    }

    /**
     * Verifies that jar paths declared in the job config's env block are added to the
     * action's jar URL list of the generated logical DAG.
     */
    @Test
    public void testJarsInEnvAddedToCommonJars() {
        Common.setDeployMode(DeployMode.CLIENT);
        String filePath = ContentFormatUtilTest.getResource("/client_test_with_jars.conf");
        JobConfig jobConfig = new JobConfig();
        jobConfig.setName("client_test_with_jars");
        try (SeaTunnelClient seaTunnelClient = createSeaTunnelClient()) {
            LogicalDag logicalDag =
                    seaTunnelClient
                            .createExecutionContext(filePath, jobConfig, SEATUNNEL_CONFIG)
                            .getLogicalDag();
            Assertions.assertIterableEquals(
                    Arrays.asList("file:/tmp/test.jar", "file:/tmp/test2.jar"),
                    logicalDag.getLogicalVertexMap().values().iterator().next().getAction()
                            .getJarUrls().stream()
                            .map(URL::toString)
                            .collect(Collectors.toList()));
        }
    }

    /**
     * Runs a streaming job, takes a savepoint, restores from it, and finally cancels the
     * restored job, asserting the expected status transitions at each step.
     */
    @Test
    public void testSavePointAndRestoreWithSavePoint() throws Exception {
        Common.setDeployMode(DeployMode.CLIENT);
        String filePath = ContentFormatUtilTest.getResource("/streaming_fake_to_console.conf");
        JobConfig jobConfig = new JobConfig();
        jobConfig.setName("streaming_fake_to_console.conf");
        SeaTunnelClient seaTunnelClient = createSeaTunnelClient();
        JobClient jobClient = seaTunnelClient.getJobClient();
        try {
            ClientJobExecutionEnvironment jobExecutionEnv =
                    seaTunnelClient.createExecutionContext(filePath, jobConfig, SEATUNNEL_CONFIG);
            final ClientJobProxy clientJobProxy = jobExecutionEnv.execute();
            long jobId = clientJobProxy.getJobId();

            // Streaming job must reach RUNNING before a savepoint can be requested.
            await().atMost(30000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () ->
                                    Assertions.assertEquals(
                                            "RUNNING", jobClient.getJobStatus(jobId)));

            RetryUtils.retryWithException(
                    () -> {
                        jobClient.savePointJob(jobId);
                        return null;
                    },
                    new RetryUtils.RetryMaterial(
                            Constant.OPERATION_RETRY_TIME,
                            true,
                            exception -> {
                                // If we do savepoint for a Job which initialization has not been
                                // completed yet, we will get an error.
                                // In this test case, we need retry savepoint.
                                return exception
                                        .getCause()
                                        .getMessage()
                                        .contains("Task not all ready, savepoint error");
                            },
                            Constant.OPERATION_RETRY_SLEEP));

            await().atMost(30000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () ->
                                    Assertions.assertEquals(
                                            "SAVEPOINT_DONE", jobClient.getJobStatus(jobId)));

            Thread.sleep(1000);
            // Restore the same job id from the savepoint just taken.
            seaTunnelClient
                    .restoreExecutionContext(filePath, jobConfig, SEATUNNEL_CONFIG, jobId)
                    .execute();

            await().atMost(30000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () ->
                                    Assertions.assertEquals(
                                            "RUNNING", jobClient.getJobStatus(jobId)));

            jobClient.cancelJob(jobId);

            await().atMost(30000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () ->
                                    Assertions.assertEquals(
                                            "CANCELED", jobClient.getJobStatus(jobId)));
        } catch (Exception e) {
            throw new RuntimeException(e);
        } finally {
            seaTunnelClient.close();
        }
    }

    /**
     * Runs a multi-table batch job and verifies that per-table metrics are reported for
     * every table, and that the per-table values sum up to the job-level totals (rate
     * metrics within a 2% tolerance).
     */
    @Test
    public void testGetMultiTableJobMetrics() {
        Common.setDeployMode(DeployMode.CLIENT);
        String filePath =
                ContentFormatUtilTest.getResource("/batch_fake_multi_table_to_console.conf");
        JobConfig jobConfig = new JobConfig();
        jobConfig.setName("testGetMultiTableJobMetrics");
        SeaTunnelClient seaTunnelClient = createSeaTunnelClient();
        JobClient jobClient = seaTunnelClient.getJobClient();
        try {
            ClientJobExecutionEnvironment jobExecutionEnv =
                    seaTunnelClient.createExecutionContext(filePath, jobConfig, SEATUNNEL_CONFIG);
            final ClientJobProxy clientJobProxy = jobExecutionEnv.execute();
            long jobId = clientJobProxy.getJobId();

            await().atMost(30000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () ->
                                    Assertions.assertTrue(
                                            jobClient.getJobDetailStatus(jobId).contains("FINISHED")
                                                    && jobClient
                                                            .listJobStatus(true)
                                                            .contains("FINISHED")));

            // Every metric must be present once per table, keyed as "<metric>#<table id>".
            String jobMetrics = jobClient.getJobMetrics(jobId);
            Assertions.assertTrue(jobMetrics.contains(SOURCE_RECEIVED_COUNT + "#fake.table1"));
            Assertions.assertTrue(
                    jobMetrics.contains(SOURCE_RECEIVED_COUNT + "#fake.public.table2"));
            Assertions.assertTrue(jobMetrics.contains(SINK_WRITE_COUNT + "#fake.table1"));
            Assertions.assertTrue(jobMetrics.contains(SINK_WRITE_COUNT + "#fake.public.table2"));
            Assertions.assertTrue(jobMetrics.contains(SOURCE_RECEIVED_BYTES + "#fake.table1"));
            Assertions.assertTrue(
                    jobMetrics.contains(SOURCE_RECEIVED_BYTES + "#fake.public.table2"));
            Assertions.assertTrue(jobMetrics.contains(SINK_WRITE_BYTES + "#fake.table1"));
            Assertions.assertTrue(jobMetrics.contains(SINK_WRITE_BYTES + "#fake.public.table2"));
            Assertions.assertTrue(jobMetrics.contains(SOURCE_RECEIVED_QPS + "#fake.table1"));
            Assertions.assertTrue(jobMetrics.contains(SOURCE_RECEIVED_QPS + "#fake.public.table2"));
            Assertions.assertTrue(jobMetrics.contains(SINK_WRITE_QPS + "#fake.table1"));
            Assertions.assertTrue(jobMetrics.contains(SINK_WRITE_QPS + "#fake.public.table2"));
            Assertions.assertTrue(
                    jobMetrics.contains(SOURCE_RECEIVED_BYTES_PER_SECONDS + "#fake.table1"));
            Assertions.assertTrue(
                    jobMetrics.contains(SOURCE_RECEIVED_BYTES_PER_SECONDS + "#fake.public.table2"));
            Assertions.assertTrue(
                    jobMetrics.contains(SINK_WRITE_BYTES_PER_SECONDS + "#fake.table1"));
            Assertions.assertTrue(
                    jobMetrics.contains(SINK_WRITE_BYTES_PER_SECONDS + "#fake.public.table2"));

            log.info("jobMetrics : {}", jobMetrics);
            JsonNode jobMetricsStr = new ObjectMapper().readTree(jobMetrics);
            List<String> metricNameList =
                    StreamSupport.stream(
                                    Spliterators.spliteratorUnknownSize(
                                            jobMetricsStr.fieldNames(), 0),
                                    false)
                            .collect(Collectors.toList());

            // Job-level totals: metric names without a "#table" suffix; each metric node
            // is an array of {"value": ...} entries which are summed.
            Map<String, Long> totalCount =
                    metricNameList.stream()
                            .filter(metrics -> !metrics.contains("#"))
                            .collect(
                                    Collectors.toMap(
                                            metrics -> metrics,
                                            metrics ->
                                                    StreamSupport.stream(
                                                                    jobMetricsStr
                                                                            .get(metrics)
                                                                            .spliterator(),
                                                                    false)
                                                            .mapToLong(
                                                                    value ->
                                                                            value.get("value")
                                                                                    .asLong())
                                                            .sum()));
            // Per-table values: metric names with a "#table" suffix.
            Map<String, Long> tableCount =
                    metricNameList.stream()
                            .filter(metrics -> metrics.contains("#"))
                            .collect(
                                    Collectors.toMap(
                                            metrics -> metrics,
                                            metrics ->
                                                    StreamSupport.stream(
                                                                    jobMetricsStr
                                                                            .get(metrics)
                                                                            .spliterator(),
                                                                    false)
                                                            .mapToLong(
                                                                    value ->
                                                                            value.get("value")
                                                                                    .asLong())
                                                            .sum()));

            // Count/byte metrics must sum exactly to the job-level totals.
            Assertions.assertEquals(
                    totalCount.get(SOURCE_RECEIVED_COUNT),
                    tableCount.entrySet().stream()
                            .filter(e -> e.getKey().startsWith(SOURCE_RECEIVED_COUNT + "#"))
                            .mapToLong(Map.Entry::getValue)
                            .sum());
            Assertions.assertEquals(
                    totalCount.get(SINK_WRITE_COUNT),
                    tableCount.entrySet().stream()
                            .filter(e -> e.getKey().startsWith(SINK_WRITE_COUNT + "#"))
                            .mapToLong(Map.Entry::getValue)
                            .sum());
            Assertions.assertEquals(
                    totalCount.get(SOURCE_RECEIVED_BYTES),
                    tableCount.entrySet().stream()
                            .filter(e -> e.getKey().startsWith(SOURCE_RECEIVED_BYTES + "#"))
                            .mapToLong(Map.Entry::getValue)
                            .sum());
            Assertions.assertEquals(
                    totalCount.get(SINK_WRITE_BYTES),
                    tableCount.entrySet().stream()
                            .filter(e -> e.getKey().startsWith(SINK_WRITE_BYTES + "#"))
                            .mapToLong(Map.Entry::getValue)
                            .sum());

            // Instantaneous rates in the same direction are directly added
            // The size does not fluctuate more than 2% of the total value
            Assertions.assertTrue(
                    Math.abs(
                                    totalCount.get(SOURCE_RECEIVED_QPS)
                                            - tableCount.entrySet().stream()
                                                    .filter(
                                                            e ->
                                                                    e.getKey()
                                                                            .startsWith(
                                                                                    SOURCE_RECEIVED_QPS
                                                                                            + "#"))
                                                    .mapToLong(Map.Entry::getValue)
                                                    .sum())
                            < totalCount.get(SOURCE_RECEIVED_QPS) * 0.02);
            Assertions.assertTrue(
                    Math.abs(
                                    totalCount.get(SINK_WRITE_QPS)
                                            - tableCount.entrySet().stream()
                                                    .filter(
                                                            e ->
                                                                    e.getKey()
                                                                            .startsWith(
                                                                                    SINK_WRITE_QPS
                                                                                            + "#"))
                                                    .mapToLong(Map.Entry::getValue)
                                                    .sum())
                            < totalCount.get(SINK_WRITE_QPS) * 0.02);
            Assertions.assertTrue(
                    Math.abs(
                                    totalCount.get(SOURCE_RECEIVED_BYTES_PER_SECONDS)
                                            - tableCount.entrySet().stream()
                                                    .filter(
                                                            e ->
                                                                    e.getKey()
                                                                            .startsWith(
                                                                                    SOURCE_RECEIVED_BYTES_PER_SECONDS
                                                                                            + "#"))
                                                    .mapToLong(Map.Entry::getValue)
                                                    .sum())
                            < totalCount.get(SOURCE_RECEIVED_BYTES_PER_SECONDS) * 0.02);
            Assertions.assertTrue(
                    Math.abs(
                                    totalCount.get(SINK_WRITE_BYTES_PER_SECONDS)
                                            - tableCount.entrySet().stream()
                                                    .filter(
                                                            e ->
                                                                    e.getKey()
                                                                            .startsWith(
                                                                                    SINK_WRITE_BYTES_PER_SECONDS
                                                                                            + "#"))
                                                    .mapToLong(Map.Entry::getValue)
                                                    .sum())
                            < totalCount.get(SINK_WRITE_BYTES_PER_SECONDS) * 0.02);

        } catch (ExecutionException | InterruptedException | JsonProcessingException e) {
            throw new RuntimeException(e);
        } finally {
            seaTunnelClient.close();
        }
    }

    /**
     * Verifies that the ST_DOCKER_MEMBER_LIST environment variable overrides the client's
     * cluster address list (four addresses injected via {@code @SetEnvironmentVariable}).
     */
    @Test
    @SetEnvironmentVariable(
            key = "ST_DOCKER_MEMBER_LIST",
            value = "127.0.0.1,127.0.0.2,127.0.0.3,127.0.0.4")
    public void testDockerEnvOverwrite() {
        ClientConfig clientConfig = ConfigProvider.locateAndGetClientConfig();
        Assertions.assertEquals(4, clientConfig.getNetworkConfig().getAddresses().size());
    }

    // Shut down the shared cluster instance started for this test class.
    @AfterAll
    public static void after() {
        INSTANCE.shutdown();
    }
}
apache/tomcat80
36,792
java/org/apache/catalina/authenticator/AuthenticatorBase.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.catalina.authenticator; import java.io.IOException; import java.security.Principal; import java.security.cert.X509Certificate; import java.util.Date; import javax.servlet.ServletException; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.catalina.Authenticator; import org.apache.catalina.Container; import org.apache.catalina.Context; import org.apache.catalina.Globals; import org.apache.catalina.LifecycleException; import org.apache.catalina.Manager; import org.apache.catalina.Realm; import org.apache.catalina.Session; import org.apache.catalina.TomcatPrincipal; import org.apache.catalina.Valve; import org.apache.catalina.connector.Request; import org.apache.catalina.connector.Response; import org.apache.catalina.realm.GenericPrincipal; import org.apache.catalina.util.ConcurrentDateFormat; import org.apache.catalina.util.SessionIdGeneratorBase; import org.apache.catalina.util.StandardSessionIdGenerator; import org.apache.catalina.valves.ValveBase; import org.apache.coyote.ActionCode; import org.apache.juli.logging.Log; import org.apache.juli.logging.LogFactory; import 
org.apache.tomcat.util.ExceptionUtils;
import org.apache.tomcat.util.descriptor.web.LoginConfig;
import org.apache.tomcat.util.descriptor.web.SecurityConstraint;
import org.apache.tomcat.util.res.StringManager;

/**
 * Basic implementation of the <b>Valve</b> interface that enforces the
 * <code>&lt;security-constraint&gt;</code> elements in the web application
 * deployment descriptor. This functionality is implemented as a Valve
 * so that it can be omitted in environments that do not require these
 * features. Individual implementations of each supported authentication
 * method can subclass this base class as required.
 * <p>
 * <b>USAGE CONSTRAINT</b>: When this class is utilized, the Context to
 * which it is attached (or a parent Container in a hierarchy) must have an
 * associated Realm that can be used for authenticating users and enumerating
 * the roles to which they have been assigned.
 * <p>
 * <b>USAGE CONSTRAINT</b>: This Valve is only useful when processing HTTP
 * requests. Requests of any other type will simply be passed through.
 *
 * @author Craig R. McClanahan
 */
public abstract class AuthenticatorBase extends ValveBase
        implements Authenticator {

    private final Log log = LogFactory.getLog(AuthenticatorBase.class); // must not be static


    //------------------------------------------------------ Constructor

    public AuthenticatorBase() {
        // super(true): see ValveBase(boolean) for the flag's meaning —
        // presumably enables async support for this valve; confirm in ValveBase.
        super(true);
    }


    // ----------------------------------------------------- Instance Variables

    /**
     * Authentication header
     */
    protected static final String AUTH_HEADER_NAME = "WWW-Authenticate";

    /**
     * Default authentication realm name.
     */
    protected static final String REALM_NAME = "Authentication required";

    /**
     * Should a session always be used once a user is authenticated? This may
     * offer some performance benefits since the session can then be used to
     * cache the authenticated Principal, hence removing the need to
     * authenticate the user via the Realm on every request. This may be of help
     * for combinations such as BASIC authentication used with the JNDIRealm or
     * DataSourceRealms. However there will also be the performance cost of
     * creating and GC'ing the session. By default, a session will not be
     * created.
     */
    protected boolean alwaysUseSession = false;

    /**
     * Should we cache authenticated Principals if the request is part of
     * an HTTP session?
     */
    protected boolean cache = true;

    /**
     * Should the session ID, if any, be changed upon a successful
     * authentication to prevent a session fixation attack?
     */
    protected boolean changeSessionIdOnAuthentication = true;

    /**
     * The Context to which this Valve is attached.
     */
    protected Context context = null;

    /**
     * Flag to determine if we disable proxy caching, or leave the issue
     * up to the webapp developer.
     */
    protected boolean disableProxyCaching = true;

    /**
     * Flag to determine if we disable proxy caching with headers incompatible
     * with IE.
     */
    protected boolean securePagesWithPragma = false;

    /**
     * The Java class name of the secure random number generator class to be
     * used when generating SSO session identifiers. The random number generator
     * class must be self-seeding and have a zero-argument constructor. If not
     * specified, an instance of {@link java.security.SecureRandom} will be
     * generated.
     */
    protected String secureRandomClass = null;

    /**
     * The name of the algorithm to use to create instances of
     * {@link java.security.SecureRandom} which are used to generate SSO session
     * IDs. If no algorithm is specified, SHA1PRNG is used. To use the platform
     * default (which may be SHA1PRNG), specify the empty string. If an invalid
     * algorithm and/or provider is specified the SecureRandom instances will be
     * created using the defaults. If that fails, the SecureRandom instances
     * will be created using platform defaults.
     */
    protected String secureRandomAlgorithm = "SHA1PRNG";

    /**
     * The name of the provider to use to create instances of
     * {@link java.security.SecureRandom} which are used to generate session SSO
     * IDs. If no algorithm is specified the SHA1PRNG default is used. If an
     * invalid algorithm and/or provider is specified the SecureRandom instances
     * will be created using the defaults. If that fails, the SecureRandom
     * instances will be created using platform defaults.
     */
    protected String secureRandomProvider = null;

    // Generates SSO session identifiers; configured in startInternal().
    protected SessionIdGeneratorBase sessionIdGenerator = null;

    /**
     * The string manager for this package.
     */
    protected static final StringManager sm =
        StringManager.getManager(Constants.Package);

    /**
     * The SingleSignOn implementation in our request processing chain,
     * if there is one.
     */
    protected SingleSignOn sso = null;

    /**
     * "Expires" header always set to Date(1), so generate once only
     */
    private static final String DATE_ONE =
            ConcurrentDateFormat.formatRfc1123(new Date(1));


    /**
     * Return the realm name configured for the given Context's login config,
     * falling back to {@link #REALM_NAME} when the Context, its login config,
     * or the configured realm name is absent.
     */
    protected static String getRealmName(Context context) {
        if (context == null) {
            // Very unlikely
            return REALM_NAME;
        }

        LoginConfig config = context.getLoginConfig();
        if (config == null) {
            return REALM_NAME;
        }

        String result = config.getRealmName();
        if (result == null) {
            return REALM_NAME;
        }

        return result;
    }

    // ------------------------------------------------------------- Properties

    /**
     * Return the flag that states if a session is always used once a user is
     * authenticated.
     */
    public boolean getAlwaysUseSession() {
        return alwaysUseSession;
    }

    /**
     * Set the flag that states if a session is always used once a user is
     * authenticated.
     *
     * @param alwaysUseSession The new flag value
     */
    public void setAlwaysUseSession(boolean alwaysUseSession) {
        this.alwaysUseSession = alwaysUseSession;
    }

    /**
     * Return the cache authenticated Principals flag.
     */
    public boolean getCache() {
        return (this.cache);
    }

    /**
     * Set the cache authenticated Principals flag.
     *
     * @param cache The new cache flag
     */
    public void setCache(boolean cache) {
        this.cache = cache;
    }

    /**
     * Return the Container to which this Valve is attached.
     */
    @Override
    public Container getContainer() {
        return (this.context);
    }

    /**
     * Set the Container to which this Valve is attached.
     *
     * @param container The container to which we are attached
     */
    @Override
    public void setContainer(Container container) {
        // Only a Context (or null) may own an authenticator valve.
        if (container != null && !(container instanceof Context)) {
            throw new IllegalArgumentException
                (sm.getString("authenticator.notContext"));
        }

        super.setContainer(container);
        this.context = (Context) container;
    }

    /**
     * Return the flag that states if we add headers to disable caching by
     * proxies.
     */
    public boolean getDisableProxyCaching() {
        return disableProxyCaching;
    }

    /**
     * Set the value of the flag that states if we add headers to disable
     * caching by proxies.
     * @param nocache <code>true</code> if we add headers to disable proxy
     *              caching, <code>false</code> if we leave the headers alone.
     */
    public void setDisableProxyCaching(boolean nocache) {
        disableProxyCaching = nocache;
    }

    /**
     * Return the flag that states, if proxy caching is disabled, what headers
     * we add to disable the caching.
     */
    public boolean getSecurePagesWithPragma() {
        return securePagesWithPragma;
    }

    /**
     * Set the value of the flag that states what headers we add to disable
     * proxy caching.
     * @param securePagesWithPragma <code>true</code> if we add headers which
     * are incompatible with downloading office documents in IE under SSL but
     * which fix a caching problem in Mozilla.
     */
    public void setSecurePagesWithPragma(boolean securePagesWithPragma) {
        this.securePagesWithPragma = securePagesWithPragma;
    }

    /**
     * Return the flag that states if we should change the session ID of an
     * existing session upon successful authentication.
     *
     * @return <code>true</code> to change session ID upon successful
     *         authentication, <code>false</code> to do not perform the change.
     */
    public boolean getChangeSessionIdOnAuthentication() {
        return changeSessionIdOnAuthentication;
    }

    /**
     * Set the value of the flag that states if we should change the session ID
     * of an existing session upon successful authentication.
     *
     * @param changeSessionIdOnAuthentication
     *            <code>true</code> to change session ID upon successful
     *            authentication, <code>false</code> to do not perform the
     *            change.
     */
    public void setChangeSessionIdOnAuthentication(
            boolean changeSessionIdOnAuthentication) {
        this.changeSessionIdOnAuthentication = changeSessionIdOnAuthentication;
    }

    /**
     * Return the secure random number generator class name.
     */
    public String getSecureRandomClass() {
        return (this.secureRandomClass);
    }

    /**
     * Set the secure random number generator class name.
     *
     * @param secureRandomClass The new secure random number generator class
     *                          name
     */
    public void setSecureRandomClass(String secureRandomClass) {
        this.secureRandomClass = secureRandomClass;
    }

    /**
     * Return the secure random number generator algorithm name.
     */
    public String getSecureRandomAlgorithm() {
        return secureRandomAlgorithm;
    }

    /**
     * Set the secure random number generator algorithm name.
     *
     * @param secureRandomAlgorithm The new secure random number generator
     *                              algorithm name
     */
    public void setSecureRandomAlgorithm(String secureRandomAlgorithm) {
        this.secureRandomAlgorithm = secureRandomAlgorithm;
    }

    /**
     * Return the secure random number generator provider name.
     */
    public String getSecureRandomProvider() {
        return secureRandomProvider;
    }

    /**
     * Set the secure random number generator provider name.
     *
     * @param secureRandomProvider The new secure random number generator
     *                             provider name
     */
    public void setSecureRandomProvider(String secureRandomProvider) {
        this.secureRandomProvider = secureRandomProvider;
    }

    // --------------------------------------------------------- Public Methods

    /**
     * Enforce the security restrictions in the web application deployment
     * descriptor of our associated Context.
     *
     * @param request Request to be processed
     * @param response Response to be processed
     *
     * @exception IOException if an input/output error occurs
     * @exception ServletException if thrown by a processing element
     */
    @Override
    public void invoke(Request request, Response response)
            throws IOException, ServletException {

        if (log.isDebugEnabled()) {
            log.debug("Security checking request " +
                    request.getMethod() + " " + request.getRequestURI());
        }

        // Have we got a cached authenticated Principal to record?
        if (cache) {
            Principal principal = request.getUserPrincipal();
            if (principal == null) {
                Session session = request.getSessionInternal(false);
                if (session != null) {
                    principal = session.getPrincipal();
                    if (principal != null) {
                        if (log.isDebugEnabled()) {
                            log.debug("We have cached auth type " +
                                    session.getAuthType() +
                                    " for principal " +
                                    session.getPrincipal());
                        }
                        request.setAuthType(session.getAuthType());
                        request.setUserPrincipal(principal);
                    }
                }
            }
        }

        // Special handling for form-based logins to deal with the case
        // where the login form (and therefore the "j_security_check" URI
        // to which it submits) might be outside the secured area
        String contextPath = this.context.getPath();
        String decodedRequestURI = request.getDecodedRequestURI();
        if (decodedRequestURI.startsWith(contextPath) &&
                decodedRequestURI.endsWith(Constants.FORM_ACTION)) {
            if (!authenticate(request, response)) {
                if (log.isDebugEnabled()) {
                    log.debug(" Failed authenticate() test ??" + decodedRequestURI );
                }
                return;
            }
        }

        // Special handling for form-based logins to deal with the case where
        // a resource is protected for some HTTP methods but not protected for
        // GET which is used after authentication when redirecting to the
        // protected resource.
        // TODO: This is similar to the FormAuthenticator.matchRequest() logic
        // Is there a way to remove the duplication?
        Session session = request.getSessionInternal(false);
        if (session != null) {
            SavedRequest savedRequest =
                    (SavedRequest) session.getNote(Constants.FORM_REQUEST_NOTE);
            if (savedRequest != null &&
                    decodedRequestURI.equals(savedRequest.getDecodedRequestURI()) &&
                    !authenticate(request, response)) {
                if (log.isDebugEnabled()) {
                    log.debug(" Failed authenticate() test");
                }
                /*
                 * ASSERT: Authenticator already set the appropriate
                 * HTTP status code, so we do not have to do anything
                 * special
                 */
                return;
            }
        }

        Realm realm = this.context.getRealm();
        // Is this request URI subject to a security constraint?
        SecurityConstraint [] constraints
            = realm.findSecurityConstraints(request, this.context);

        if (constraints == null && !context.getPreemptiveAuthentication()) {
            if (log.isDebugEnabled()) {
                log.debug(" Not subject to any constraint");
            }
            getNext().invoke(request, response);
            return;
        }

        // Make sure that constrained resources are not cached by web proxies
        // or browsers as caching can provide a security hole
        // NOTE(review): POST requests are skipped here — presumably because
        // proxies do not cache POST responses; confirm before changing.
        if (constraints != null && disableProxyCaching &&
                !"POST".equalsIgnoreCase(request.getMethod())) {
            if (securePagesWithPragma) {
                // Note: These can cause problems with downloading files with IE
                response.setHeader("Pragma", "No-cache");
                response.setHeader("Cache-Control", "no-cache");
            } else {
                response.setHeader("Cache-Control", "private");
            }
            response.setHeader("Expires", DATE_ONE);
        }

        int i;
        if (constraints != null) {
            // Enforce any user data constraint for this security constraint
            if (log.isDebugEnabled()) {
                log.debug(" Calling hasUserDataPermission()");
            }
            if (!realm.hasUserDataPermission(request, response, constraints)) {
                if (log.isDebugEnabled()) {
                    log.debug(" Failed hasUserDataPermission() test");
                }
                /*
                 * ASSERT: Authenticator already set the appropriate
                 * HTTP status code, so we do not have to do anything special
                 */
                return;
            }
        }

        // Since authenticate modifies the response on failure,
        // we have to check for allow-from-all first.
        boolean authRequired;
        if (constraints == null) {
            authRequired = false;
        } else {
            authRequired = true;
            for(i=0; i < constraints.length && authRequired; i++) {
                if(!constraints[i].getAuthConstraint()) {
                    authRequired = false;
                    break;
                } else if(!constraints[i].getAllRoles() &&
                        !constraints[i].getAuthenticatedUsers()) {
                    String [] roles = constraints[i].findAuthRoles();
                    if(roles == null || roles.length == 0) {
                        authRequired = false;
                        break;
                    }
                }
            }
        }

        // Preemptive authentication: authenticate even without a constraint
        // when the client has already supplied credentials.
        if (!authRequired && context.getPreemptiveAuthentication()) {
            authRequired =
                    request.getCoyoteRequest().getMimeHeaders().getValue(
                            "authorization") != null;
        }

        if (!authRequired && context.getPreemptiveAuthentication() &&
                HttpServletRequest.CLIENT_CERT_AUTH.equals(getAuthMethod())) {
            X509Certificate[] certs = getRequestCertificates(request);
            authRequired = certs != null && certs.length > 0;
        }

        if(authRequired) {
            if (log.isDebugEnabled()) {
                log.debug(" Calling authenticate()");
            }
            if (!authenticate(request, response)) {
                if (log.isDebugEnabled()) {
                    log.debug(" Failed authenticate() test");
                }
                /*
                 * ASSERT: Authenticator already set the appropriate
                 * HTTP status code, so we do not have to do anything
                 * special
                 */
                return;
            }
        }

        if (constraints != null) {
            if (log.isDebugEnabled()) {
                log.debug(" Calling accessControl()");
            }
            if (!realm.hasResourcePermission(request, response, constraints,
                    this.context)) {
                if (log.isDebugEnabled()) {
                    log.debug(" Failed accessControl() test");
                }
                /*
                 * ASSERT: AccessControl method has already set the
                 * appropriate HTTP status code, so we do not have to do
                 * anything special
                 */
                return;
            }
        }

        // Any and all specified constraints have been satisfied
        if (log.isDebugEnabled()) {
            log.debug(" Successfully passed all security constraints");
        }
        getNext().invoke(request, response);

    }

    // ------------------------------------------------------ Protected Methods

    /**
     * Look for the X509 certificate chain in the Request under the key
     * <code>javax.servlet.request.X509Certificate</code>.
If not found, trigger
     * extracting the certificate chain from the Coyote request.
     *
     * @param request Request to be processed
     *
     * @return The X509 certificate chain if found, <code>null</code>
     *         otherwise.
     */
    protected X509Certificate[] getRequestCertificates(final Request request)
            throws IllegalStateException {

        X509Certificate certs[] =
                (X509Certificate[]) request.getAttribute(Globals.CERTIFICATES_ATTR);

        if ((certs == null) || (certs.length < 1)) {
            try {
                request.getCoyoteRequest().action(ActionCode.REQ_SSL_CERTIFICATE, null);
                certs = (X509Certificate[])
                        request.getAttribute(Globals.CERTIFICATES_ATTR);
            } catch (IllegalStateException ise) {
                // Request body was too large for save buffer
                // Return null which will trigger an auth failure
            }
        }

        return certs;
    }

    /**
     * Associate the specified single sign on identifier with the
     * specified Session.
     *
     * @param ssoId Single sign on identifier
     * @param session Session to be associated
     */
    protected void associate(String ssoId, Session session) {
        if (sso == null) {
            return;
        }
        sso.associate(ssoId, session);
    }

    /**
     * Authenticate the user making this request, based on the login
     * configuration of the {@link Context} with which this Authenticator is
     * associated.  Return <code>true</code> if any specified constraint has
     * been satisfied, or <code>false</code> if we have created a response
     * challenge already.
     *
     * @param request Request we are processing
     * @param response Response we are populating
     *
     * @exception IOException if an input/output error occurs
     */
    @Override
    public abstract boolean authenticate(Request request,
            HttpServletResponse response) throws IOException;

    /**
     * Check to see if the user has already been authenticated earlier in the
     * processing chain or if there is enough information available to
     * authenticate the user without requiring further user interaction.
     *
     * @param request The current request
     * @param response The current response
     * @param useSSO  Should information available from SSO be used to attempt
     *                to authenticate the current user?
     *
     * @return <code>true</code> if the user was authenticated via the cache,
     *         otherwise <code>false</code>
     */
    protected boolean checkForCachedAuthentication(Request request,
            HttpServletResponse response, boolean useSSO) {

        // Has the user already been authenticated?
        Principal principal = request.getUserPrincipal();
        String ssoId = (String) request.getNote(Constants.REQ_SSOID_NOTE);
        if (principal != null) {
            if (log.isDebugEnabled()) {
                log.debug(sm.getString("authenticator.check.found",
                        principal.getName()));
            }
            // Associate the session with any existing SSO session. Even if
            // useSSO is false, this will ensure coordinated session
            // invalidation at log out.
            if (ssoId != null) {
                associate(ssoId, request.getSessionInternal(true));
            }
            return true;
        }

        // Is there an SSO session against which we can try to reauthenticate?
        if (useSSO && ssoId != null) {
            if (log.isDebugEnabled()) {
                log.debug(sm.getString("authenticator.check.sso", ssoId));
            }
            /* Try to reauthenticate using data cached by SSO.  If this fails,
               either the original SSO logon was of DIGEST or SSL (which
               we can't reauthenticate ourselves because there is no
               cached username and password), or the realm denied
               the user's reauthentication for some reason.
               In either case we have to prompt the user for a logon */
            if (reauthenticateFromSSO(ssoId, request)) {
                return true;
            }
        }

        // Has the Connector provided a pre-authenticated Principal that now
        // needs to be authorized?
        if (request.getCoyoteRequest().getRemoteUserNeedsAuthorization()) {
            // NOTE(review): assumes getRemoteUser() returns a non-null object
            // whenever the connector requests authorization — toString() would
            // NPE otherwise. Confirm against the connector contract.
            String username = request.getCoyoteRequest().getRemoteUser().toString();
            if (username != null) {
                if (log.isDebugEnabled()) {
                    log.debug(sm.getString("authenticator.check.authorize",
                            username));
                }
                Principal authorized = context.getRealm().authenticate(username);
                if (authorized == null) {
                    // Realm doesn't recognise user. Create a user with no roles
                    // from the authenticated user name
                    if (log.isDebugEnabled()) {
                        log.debug(sm.getString("authenticator.check.authorizeFail",
                                username));
                    }
                    authorized = new GenericPrincipal(username, null, null);
                }
                String authType = request.getAuthType();
                if (authType == null || authType.length() == 0) {
                    authType = getAuthMethod();
                }
                register(request, response, authorized, authType, username, null);
                return true;
            }
        }
        return false;
    }

    /**
     * Attempts reauthentication to the <code>Realm</code> using
     * the credentials included in argument <code>entry</code>.
     *
     * @param ssoId identifier of SingleSignOn session with which the
     *              caller is associated
     * @param request the request that needs to be authenticated
     */
    protected boolean reauthenticateFromSSO(String ssoId, Request request) {

        if (sso == null || ssoId == null) {
            return false;
        }

        boolean reauthenticated = false;

        Container parent = getContainer();
        if (parent != null) {
            Realm realm = parent.getRealm();
            if (realm != null) {
                reauthenticated = sso.reauthenticate(ssoId, realm, request);
            }
        }

        if (reauthenticated) {
            associate(ssoId, request.getSessionInternal(true));

            if (log.isDebugEnabled()) {
                log.debug(" Reauthenticated cached principal '" +
                        request.getUserPrincipal().getName() +
                        "' with auth type '" + request.getAuthType() + "'");
            }
        }

        return reauthenticated;
    }

    /**
     * Register an authenticated Principal and authentication type in our
     * request, in the current session (if there is one), and with our
     * SingleSignOn valve, if there is one.  Set the appropriate cookie
     * to be returned.
     *
     * @param request The servlet request we are processing
     * @param response The servlet response we are generating
     * @param principal The authenticated Principal to be registered
     * @param authType The authentication type to be registered
     * @param username Username used to authenticate (if any)
     * @param password Password used to authenticate (if any)
     */
    public void register(Request request, HttpServletResponse response,
            Principal principal, String authType,
            String username, String password) {

        if (log.isDebugEnabled()) {
            String name = (principal == null) ? "none" : principal.getName();
            log.debug("Authenticated '" + name + "' with type '" + authType +
                    "'");
        }

        // Cache the authentication information in our request
        request.setAuthType(authType);
        request.setUserPrincipal(principal);

        Session session = request.getSessionInternal(false);

        if (session != null) {
            // If the principal is null then this is a logout. No need to change
            // the session ID. See BZ 59043.
            if (changeSessionIdOnAuthentication && principal != null) {
                String oldId = null;
                if (log.isDebugEnabled()) {
                    oldId = session.getId();
                }
                // Change the session ID after authentication (session fixation
                // protection) and propagate the new ID to the request.
                Manager manager = request.getContext().getManager();
                manager.changeSessionId(session);
                request.changeSessionId(session.getId());
                if (log.isDebugEnabled()) {
                    log.debug(sm.getString("authenticator.changeSessionId",
                            oldId, session.getId()));
                }
            }
        } else if (alwaysUseSession) {
            session = request.getSessionInternal(true);
        }

        // Cache the authentication information in our session, if any
        if (cache) {
            if (session != null) {
                session.setAuthType(authType);
                session.setPrincipal(principal);
                if (username != null) {
                    session.setNote(Constants.SESS_USERNAME_NOTE, username);
                } else {
                    session.removeNote(Constants.SESS_USERNAME_NOTE);
                }
                if (password != null) {
                    session.setNote(Constants.SESS_PASSWORD_NOTE, password);
                } else {
                    session.removeNote(Constants.SESS_PASSWORD_NOTE);
                }
            }
        }

        // Construct a cookie to be returned to the client
        if (sso == null) {
            return;
        }

        // Only create a new SSO entry if the SSO did not already set a note
        // for an existing entry (as it would do with subsequent requests
        // for DIGEST and SSL authenticated contexts)
        String ssoId = (String) request.getNote(Constants.REQ_SSOID_NOTE);
        if (ssoId == null) {
            // Construct a cookie to be returned to the client
            ssoId = sessionIdGenerator.generateSessionId();
            Cookie cookie = new Cookie(Constants.SINGLE_SIGN_ON_COOKIE, ssoId);
            cookie.setMaxAge(-1);
            cookie.setPath("/");

            // Bugzilla 41217
            cookie.setSecure(request.isSecure());

            // Bugzilla 34724
            String ssoDomain = sso.getCookieDomain();
            if(ssoDomain != null) {
                cookie.setDomain(ssoDomain);
            }

            // Configure httpOnly on SSO cookie using same rules as session cookies
            if (request.getServletContext().getSessionCookieConfig().isHttpOnly()
                    || request.getContext().getUseHttpOnly()) {
                cookie.setHttpOnly(true);
            }
            response.addCookie(cookie);

            // Register this principal with our SSO valve
            sso.register(ssoId, principal, authType, username, password);
            request.setNote(Constants.REQ_SSOID_NOTE, ssoId);

        } else {
            if (principal == null) {
                // Registering a programmatic logout
                sso.deregister(ssoId);
                request.removeNote(Constants.REQ_SSOID_NOTE);
                return;
            } else {
                // Update the SSO session with the latest authentication data
                sso.update(ssoId, principal, authType, username, password);
            }
        }

        // Fix for Bug 10040
        // Always associate a session with a new SSO registration.
        // SSO entries are only removed from the SSO registry map when
        // associated sessions are destroyed; if a new SSO entry is created
        // above for this request and the user never revisits the context, the
        // SSO entry will never be cleared if we don't associate the session
        if (session == null) {
            session = request.getSessionInternal(true);
        }
        sso.associate(ssoId, session);

    }

    /**
     * Authenticate the given credentials against the Context's Realm and, on
     * success, register the resulting Principal on the request (and session /
     * SSO, per {@link #register}).
     *
     * @param username The user to authenticate
     * @param password The password to authenticate with
     * @param request The request this login is associated with
     *
     * @throws ServletException if authentication fails
     */
    @Override
    public void login(String username, String password, Request request)
            throws ServletException {
        Principal principal = doLogin(request, username, password);
        register(request, request.getResponse(), principal,
                getAuthMethod(), username, password);
    }

    /**
     * Return the authentication method name implemented by this authenticator;
     * used as the auth type when registering principals.
     */
    protected abstract String getAuthMethod();

    /**
     * Process the login request.
     *
     * @param request   Associated request
     * @param username  The user
     * @param password  The password
     * @return          The authenticated Principal
     * @throws ServletException if authentication against the Realm fails
     */
    protected Principal doLogin(Request request, String username,
            String password) throws ServletException {
        Principal p = context.getRealm().authenticate(username, password);
        if (p == null) {
            throw new ServletException(sm.getString("authenticator.loginFail"));
        }
        return p;
    }

    @Override
    public void logout(Request request) {
        // Give container-specific principals a chance to clean up first.
        Principal p = request.getPrincipal();
        if (p instanceof TomcatPrincipal) {
            try {
                ((TomcatPrincipal) p).logout();
            } catch (Throwable t) {
                ExceptionUtils.handleThrowable(t);
                log.debug(sm.getString("authenticator.tomcatPrincipalLogoutFail"), t);
            }
        }
        // Registering a null principal clears request/session/SSO state.
        register(request, request.getResponse(), null, null, null, null);
    }

    /**
     * Start this component and implement the requirements
     * of {@link org.apache.catalina.util.LifecycleBase#startInternal()}.
* * @exception LifecycleException if this component detects a fatal error * that prevents this component from being used */ @Override protected synchronized void startInternal() throws LifecycleException { // Look up the SingleSignOn implementation in our request processing // path, if there is one Container parent = context.getParent(); while ((sso == null) && (parent != null)) { Valve valves[] = parent.getPipeline().getValves(); for (int i = 0; i < valves.length; i++) { if (valves[i] instanceof SingleSignOn) { sso = (SingleSignOn) valves[i]; break; } } if (sso == null) { parent = parent.getParent(); } } if (log.isDebugEnabled()) { if (sso != null) { log.debug("Found SingleSignOn Valve at " + sso); } else { log.debug("No SingleSignOn Valve is present"); } } sessionIdGenerator = new StandardSessionIdGenerator(); sessionIdGenerator.setSecureRandomAlgorithm(getSecureRandomAlgorithm()); sessionIdGenerator.setSecureRandomClass(getSecureRandomClass()); sessionIdGenerator.setSecureRandomProvider(getSecureRandomProvider()); super.startInternal(); } /** * Stop this component and implement the requirements * of {@link org.apache.catalina.util.LifecycleBase#stopInternal()}. * * @exception LifecycleException if this component detects a fatal error * that prevents this component from being used */ @Override protected synchronized void stopInternal() throws LifecycleException { super.stopInternal(); sso = null; } }
apache/hadoop
36,642
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/FifoScheduler.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo; import org.apache.hadoop.classification.VisibleForTesting; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate; import org.apache.hadoop.classification.InterfaceStability.Evolving; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authorize.AccessControlList; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.Container; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerStatus; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.QueueACL; import org.apache.hadoop.yarn.api.records.QueueInfo; import org.apache.hadoop.yarn.api.records.QueueState; import org.apache.hadoop.yarn.api.records.QueueUserACLInfo; import org.apache.hadoop.yarn.api.records.Resource; import 
org.apache.hadoop.yarn.api.records.ResourceRequest; import org.apache.hadoop.yarn.api.records.SchedulingRequest; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.server.api.protocolrecords.NMContainerStatus; import org.apache.hadoop.yarn.server.resourcemanager.RMContext; import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager; import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore.RMState; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppEvent; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppEventType; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptEvent; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptEventType; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState; import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer; import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerEventType; import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerState; import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.AbstractYarnScheduler; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ActiveUsersManager; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.Allocation; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ContainerUpdates; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.NodeType; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.Queue; import 
org.apache.hadoop.yarn.server.resourcemanager.scheduler.QueueMetrics; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerAppUtils; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerApplication; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerApplicationAttempt; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerUtils; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerNode; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAddedSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAttemptAddedSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAttemptRemovedSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppRemovedSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.ContainerExpiredSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeAddedSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeRemovedSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeResourceUpdateSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeUpdateSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.ReleaseContainerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.PendingAsk; import org.apache.hadoop.yarn.server.scheduler.SchedulerRequestKey; import org.apache.hadoop.yarn.server.utils.BuilderUtils; import org.apache.hadoop.yarn.server.utils.Lock; import org.apache.hadoop.yarn.util.resource.DefaultResourceCalculator; import org.apache.hadoop.yarn.util.resource.ResourceCalculator; import 
org.apache.hadoop.yarn.util.resource.Resources;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentSkipListMap;

/**
 * A YARN scheduler that assigns containers to applications in strict
 * submission (FIFO) order within a single built-in "default" queue.
 * Locality is honoured by trying node-local, then rack-local, then
 * off-switch asks on each node heartbeat.
 */
@LimitedPrivate("yarn")
@Evolving
@SuppressWarnings("unchecked")
public class FifoScheduler extends
    AbstractYarnScheduler<FifoAppAttempt, FiCaSchedulerNode> implements
    Configurable {

  private static final Logger LOG =
      LoggerFactory.getLogger(FifoScheduler.class);

  private static final RecordFactory recordFactory =
      RecordFactoryProvider.getRecordFactory(null);

  Configuration conf;

  private boolean usePortForNodeName;

  private ActiveUsersManager activeUsersManager;

  private static final String DEFAULT_QUEUE_NAME = "default";
  private QueueMetrics metrics;

  private final ResourceCalculator resourceCalculator =
      new DefaultResourceCalculator();

  // The single queue this scheduler exposes. Capacity is always reported as
  // 100% and every user is allowed every QueueACL ("*").
  private final Queue DEFAULT_QUEUE = new Queue() {
    @Override
    public String getQueueName() {
      return DEFAULT_QUEUE_NAME;
    }

    @Override
    public QueueMetrics getMetrics() {
      return metrics;
    }

    @Override
    public QueueInfo getQueueInfo(
        boolean includeChildQueues, boolean recursive) {
      QueueInfo queueInfo = recordFactory.newRecordInstance(QueueInfo.class);
      queueInfo.setQueueName(DEFAULT_QUEUE.getQueueName());
      queueInfo.setCapacity(1.0f);
      Resource clusterResource = getClusterResource();
      // Guard against divide-by-zero before any node has registered
      if (clusterResource.getMemorySize() == 0) {
        queueInfo.setCurrentCapacity(0.0f);
      } else {
        queueInfo.setCurrentCapacity((float) usedResource.getMemorySize()
            / clusterResource.getMemorySize());
      }
      queueInfo.setMaximumCapacity(1.0f);
      queueInfo.setChildQueues(new ArrayList<QueueInfo>());
      queueInfo.setQueueState(QueueState.RUNNING);
      return queueInfo;
    }

    public Map<QueueACL, AccessControlList> getQueueAcls() {
      // Every ACL is wide open ("*") for the default queue
      Map<QueueACL, AccessControlList> acls =
          new HashMap<QueueACL, AccessControlList>();
      for (QueueACL acl : QueueACL.values()) {
        acls.put(acl, new AccessControlList("*"));
      }
      return acls;
    }

    @Override
    public List<QueueUserACLInfo> getQueueUserAclInfo(
        UserGroupInformation unused) {
      QueueUserACLInfo queueUserAclInfo =
          recordFactory.newRecordInstance(QueueUserACLInfo.class);
      queueUserAclInfo.setQueueName(DEFAULT_QUEUE_NAME);
      queueUserAclInfo.setUserAcls(Arrays.asList(QueueACL.values()));
      return Collections.singletonList(queueUserAclInfo);
    }

    @Override
    public boolean hasAccess(QueueACL acl, UserGroupInformation user) {
      return getQueueAcls().get(acl).isUserAllowed(user);
    }

    @Override
    public ActiveUsersManager getAbstractUsersManager() {
      return activeUsersManager;
    }

    @Override
    public void recoverContainer(Resource clusterResource,
        SchedulerApplicationAttempt schedulerAttempt,
        RMContainer rmContainer) {
      // Completed containers contribute nothing to recovered usage
      if (rmContainer.getState().equals(RMContainerState.COMPLETED)) {
        return;
      }
      increaseUsedResources(rmContainer);
      updateAppHeadRoom(schedulerAttempt);
      updateAvailableResourcesMetrics();
    }

    @Override
    public Set<String> getAccessibleNodeLabels() {
      // TODO add implementation for FIFO scheduler
      return null;
    }

    @Override
    public String getDefaultNodeLabelExpression() {
      // TODO add implementation for FIFO scheduler
      return null;
    }

    @Override
    public void incPendingResource(String nodeLabel, Resource resourceToInc) {
    }

    @Override
    public void decPendingResource(String nodeLabel, Resource resourceToDec) {
    }

    @Override
    public Priority getDefaultApplicationPriority() {
      // TODO add implementation for FIFO scheduler
      return null;
    }

    @Override
    public void incReservedResource(String partition, Resource reservedRes) {
      // TODO add implementation for FIFO scheduler
    }

    @Override
    public void decReservedResource(String partition, Resource reservedRes) {
      // TODO add implementation for FIFO scheduler
    }
  };

  public FifoScheduler() {
    super(FifoScheduler.class.getName());
  }

  /**
   * One-time scheduler setup: validates memory configuration, creates the
   * ordered application map, and initialises allocation limits and metrics.
   *
   * @param conf scheduler configuration
   */
  private synchronized void initScheduler(Configuration conf) {
    validateConf(conf);
    //Use ConcurrentSkipListMap because applications need to be ordered
    this.applications = new ConcurrentSkipListMap<>();
    this.minimumAllocation = super.getMinimumAllocation();
    initMaximumResourceCapability(super.getMaximumAllocation());
    this.usePortForNodeName = conf.getBoolean(
        YarnConfiguration.RM_SCHEDULER_INCLUDE_PORT_IN_NODE_NAME,
        YarnConfiguration.DEFAULT_RM_SCHEDULER_USE_PORT_FOR_NODE_NAME);
    this.metrics = QueueMetrics.forQueue(DEFAULT_QUEUE_NAME, null, false,
        conf);
    this.activeUsersManager = new ActiveUsersManager(metrics);
  }

  @Override
  public void serviceInit(Configuration conf) throws Exception {
    initScheduler(conf);
    super.serviceInit(conf);

    // Initialize SchedulingMonitorManager
    schedulingMonitorManager.initialize(rmContext, conf);
  }

  @Override
  public void serviceStart() throws Exception {
    super.serviceStart();
  }

  @Override
  public void serviceStop() throws Exception {
    super.serviceStop();
  }

  @Override
  public synchronized void setConf(Configuration conf) {
    this.conf = conf;
  }

  /**
   * Validate the scheduler memory allocation configuration.
   *
   * @param conf configuration to validate
   * @throws YarnRuntimeException if min allocation is non-positive or
   *         exceeds max allocation
   */
  private void validateConf(Configuration conf) {
    // validate scheduler memory allocation setting
    int minMem = conf.getInt(
        YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB,
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB);
    int maxMem = conf.getInt(
        YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_MB,
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB);

    if (minMem <= 0 || minMem > maxMem) {
      throw new YarnRuntimeException("Invalid resource scheduler memory"
          + " allocation configuration"
          + ", " + YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB
          + "=" + minMem
          + ", " + YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_MB
          + "=" + maxMem + ", min and max should be greater than 0"
          + ", max should be no smaller than min.");
    }
  }

  @Override
  public synchronized Configuration getConf() {
    return conf;
  }

  @Override
  public int getNumClusterNodes() {
    return nodeTracker.nodeCount();
  }

  @Override
  public synchronized void setRMContext(RMContext rmContext) {
    this.rmContext = rmContext;
  }

  @Override
  public synchronized void reinitialize(Configuration conf,
      RMContext rmContext) throws IOException {
    setConf(conf);
    super.reinitialize(conf, rmContext);
  }

  /**
   * Handle an AM heartbeat: update the attempt's resource requests and
   * blacklist, release requested containers, and hand back any containers
   * allocated since the last call together with the current headroom.
   */
  @Override
  public Allocation allocate(ApplicationAttemptId applicationAttemptId,
      List<ResourceRequest> ask, List<SchedulingRequest> schedulingRequests,
      List<ContainerId> release, List<String> blacklistAdditions,
      List<String> blacklistRemovals, ContainerUpdates updateRequests) {
    FifoAppAttempt application = getApplicationAttempt(applicationAttemptId);
    if (application == null) {
      LOG.error("Calling allocate on removed or non existent application " +
          applicationAttemptId.getApplicationId());
      return EMPTY_ALLOCATION;
    }

    // The allocate may be the leftover from previous attempt, and it will
    // impact current attempt, such as confuse the request and allocation for
    // current attempt's AM container.
    // Note outside precondition check for the attempt id may be
    // outdated here, so double check it here is necessary.
    if (!application.getApplicationAttemptId().equals(applicationAttemptId)) {
      LOG.error("Calling allocate on previous or removed " +
          "or non existent application attempt " + applicationAttemptId);
      return EMPTY_ALLOCATION;
    }

    // Sanity check
    normalizeResourceRequests(ask);

    // Release containers
    releaseContainers(release, application);

    synchronized (application) {

      // make sure we aren't stopping/removing the application
      // when the allocate comes in
      if (application.isStopped()) {
        LOG.info("Calling allocate on a stopped " +
            "application " + applicationAttemptId);
        return EMPTY_ALLOCATION;
      }

      if (!ask.isEmpty()) {
        LOG.debug("allocate: pre-update" +
            " applicationId=" + applicationAttemptId +
            " application=" + application);
        application.showRequests();

        // Update application requests
        application.updateResourceRequests(ask);

        LOG.debug("allocate: post-update" +
            " applicationId=" + applicationAttemptId +
            " application=" + application);
        application.showRequests();

        LOG.debug("allocate:" +
            " applicationId=" + applicationAttemptId +
            " #ask=" + ask.size());
      }

      application.updateBlacklist(blacklistAdditions, blacklistRemovals);

      Resource headroom = application.getHeadroom();
      application.setApplicationHeadroomForMetrics(headroom);
      return new Allocation(application.pullNewlyAllocatedContainers(),
          headroom, null, null, null, application.pullUpdatedNMTokens());
    }
  }

  /**
   * Accept a new application into the (single, default) queue and notify the
   * RMApp of acceptance unless the app is being recovered.
   */
  @VisibleForTesting
  public synchronized void addApplication(ApplicationId applicationId,
      String queue, String user, boolean isAppRecovering,
      boolean unmanagedAM) {
    SchedulerApplication<FifoAppAttempt> application =
        new SchedulerApplication<>(DEFAULT_QUEUE, user, unmanagedAM);
    applications.put(applicationId, application);
    metrics.submitApp(user, unmanagedAM);
    LOG.info("Accepted application " + applicationId + " from user: " + user
        + ", currently num of applications: " + applications.size());
    if (isAppRecovering) {
      LOG.debug("{} is recovering. Skip notifying APP_ACCEPTED",
          applicationId);
    } else {
      rmContext.getDispatcher().getEventHandler()
          .handle(new RMAppEvent(applicationId, RMAppEventType.APP_ACCEPTED));
    }
  }

  /**
   * Create and register a new attempt for an already-added application,
   * optionally transferring state from the previous attempt
   * (work-preserving AM restart).
   */
  @VisibleForTesting
  public synchronized void
      addApplicationAttempt(ApplicationAttemptId appAttemptId,
          boolean transferStateFromPreviousAttempt,
          boolean isAttemptRecovering) {
    SchedulerApplication<FifoAppAttempt> application =
        applications.get(appAttemptId.getApplicationId());
    String user = application.getUser();
    // TODO: Fix store
    FifoAppAttempt schedulerApp =
        new FifoAppAttempt(appAttemptId, user, DEFAULT_QUEUE,
            activeUsersManager, this.rmContext);

    if (transferStateFromPreviousAttempt) {
      schedulerApp.transferStateFromPreviousAttempt(application
          .getCurrentAppAttempt());
    }
    application.setCurrentAppAttempt(schedulerApp);

    metrics.submitAppAttempt(user, application.isUnmanagedAM());
    LOG.info("Added Application Attempt " + appAttemptId
        + " to scheduler from user " + application.getUser());
    if (isAttemptRecovering) {
      LOG.debug("{} is recovering. Skipping notifying ATTEMPT_ADDED",
          appAttemptId);
    } else {
      rmContext.getDispatcher().getEventHandler().handle(
          new RMAppAttemptEvent(appAttemptId,
              RMAppAttemptEventType.ATTEMPT_ADDED));
    }
  }

  /**
   * Remove a finished application: deactivate its user, stop the app and
   * drop it from the ordered application map.
   */
  private synchronized void doneApplication(ApplicationId applicationId,
      RMAppState finalState) {
    SchedulerApplication<FifoAppAttempt> application =
        applications.get(applicationId);
    if (application == null){
      LOG.warn("Couldn't find application " + applicationId);
      return;
    }

    // Inform the activeUsersManager
    activeUsersManager.deactivateApplication(application.getUser(),
        applicationId);
    application.stop(finalState);
    applications.remove(applicationId);
  }

  /**
   * Tear down a finished application attempt, killing its live containers
   * unless keepContainers is set (work-preserving AM restart keeps RUNNING
   * containers alive).
   *
   * @throws IOException if the attempt or its application is unknown
   */
  private synchronized void doneApplicationAttempt(
      ApplicationAttemptId applicationAttemptId,
      RMAppAttemptState rmAppAttemptFinalState, boolean keepContainers)
      throws IOException {
    FifoAppAttempt attempt = getApplicationAttempt(applicationAttemptId);
    SchedulerApplication<FifoAppAttempt> application =
        applications.get(applicationAttemptId.getApplicationId());
    if (application == null || attempt == null) {
      throw new IOException("Unknown application " + applicationAttemptId +
          " has completed!");
    }

    // Kill all 'live' containers
    for (RMContainer container : attempt.getLiveContainers()) {
      if (keepContainers
          && container.getState().equals(RMContainerState.RUNNING)) {
        // do not kill the running container in the case of work-preserving AM
        // restart.
        LOG.info("Skip killing " + container.getContainerId());
        continue;
      }
      super.completedContainer(container,
          SchedulerUtils.createAbnormalContainerStatus(
              container.getContainerId(),
              SchedulerUtils.COMPLETED_APPLICATION),
          RMContainerEventType.KILL);
    }

    // Clean up pending requests, metrics etc.
    attempt.stop(rmAppAttemptFinalState);
  }

  /**
   * Heart of the scheduler: walk applications in FIFO order and hand out
   * containers on the given node until the node runs below the minimum
   * allocation, then refresh every attempt's headroom.
   *
   * @param node node on which resources are available to be allocated
   */
  private void assignContainers(FiCaSchedulerNode node) {
    LOG.debug("assignContainers:" +
        " node=" + node.getRMNode().getNodeAddress() +
        " #applications=" + applications.size());

    // Try to assign containers to applications in fifo order
    for (Map.Entry<ApplicationId, SchedulerApplication<FifoAppAttempt>> e :
        applications.entrySet()) {
      FifoAppAttempt application = e.getValue().getCurrentAppAttempt();
      if (application == null) {
        continue;
      }

      LOG.debug("pre-assignContainers");
      application.showRequests();
      synchronized (application) {
        // Check if this resource is on the blacklist
        if (SchedulerAppUtils.isPlaceBlacklisted(application, node, LOG)) {
          continue;
        }
        for (SchedulerRequestKey schedulerKey :
            application.getSchedulerKeys()) {
          int maxContainers =
              getMaxAllocatableContainers(application, schedulerKey, node,
                  NodeType.OFF_SWITCH);
          // Ensure the application needs containers of this priority
          if (maxContainers > 0) {
            int assignedContainers =
                assignContainersOnNode(node, application, schedulerKey);
            // Do not assign out of order w.r.t priorities
            if (assignedContainers == 0) {
              break;
            }
          }
        }
      }

      LOG.debug("post-assignContainers");
      application.showRequests();

      // Done
      if (Resources.lessThan(resourceCalculator, getClusterResource(),
              node.getUnallocatedResource(), minimumAllocation)) {
        break;
      }
    }

    // Update the applications' headroom to correctly take into
    // account the containers assigned in this update.
    for (SchedulerApplication<FifoAppAttempt> application :
        applications.values()) {
      FifoAppAttempt attempt =
          (FifoAppAttempt) application.getCurrentAppAttempt();
      if (attempt == null) {
        continue;
      }
      updateAppHeadRoom(attempt);
    }
  }

  /**
   * Cap the number of containers assignable at the given locality level by
   * the outstanding asks at that level (starting from the ANY ask).
   */
  private int getMaxAllocatableContainers(FifoAppAttempt application,
      SchedulerRequestKey schedulerKey, FiCaSchedulerNode node,
      NodeType type) {
    PendingAsk offswitchAsk = application.getPendingAsk(schedulerKey,
        ResourceRequest.ANY);
    int maxContainers = offswitchAsk.getCount();

    if (type == NodeType.OFF_SWITCH) {
      return maxContainers;
    }

    if (type == NodeType.RACK_LOCAL) {
      PendingAsk rackLocalAsk = application.getPendingAsk(schedulerKey,
          node.getRackName());
      if (rackLocalAsk.getCount() <= 0) {
        return maxContainers;
      }

      maxContainers = Math.min(maxContainers,
          rackLocalAsk.getCount());
    }

    if (type == NodeType.NODE_LOCAL) {
      PendingAsk nodeLocalAsk = application.getPendingAsk(schedulerKey,
          node.getRMNode().getHostName());

      if (nodeLocalAsk.getCount() > 0) {
        maxContainers = Math.min(maxContainers,
            nodeLocalAsk.getCount());
      }
    }

    return maxContainers;
  }

  /**
   * Assign containers for one scheduler key on one node, trying the
   * locality levels in order: node-local, rack-local, off-switch.
   *
   * @return total number of containers assigned across all locality levels
   */
  private int assignContainersOnNode(FiCaSchedulerNode node,
      FifoAppAttempt application, SchedulerRequestKey schedulerKey
  ) {
    // Data-local
    int nodeLocalContainers =
        assignNodeLocalContainers(node, application, schedulerKey);

    // Rack-local
    int rackLocalContainers =
        assignRackLocalContainers(node, application, schedulerKey);

    // Off-switch
    int offSwitchContainers =
        assignOffSwitchContainers(node, application, schedulerKey);

    LOG.debug("assignContainersOnNode:" +
        " node=" + node.getRMNode().getNodeAddress() +
        " application=" + application.getApplicationId().getId() +
        " priority=" + schedulerKey.getPriority() +
        " #assigned=" +
        (nodeLocalContainers + rackLocalContainers + offSwitchContainers));

    return (nodeLocalContainers + rackLocalContainers + offSwitchContainers);
  }

  private int assignNodeLocalContainers(FiCaSchedulerNode node,
      FifoAppAttempt application, SchedulerRequestKey schedulerKey) {
    int assignedContainers = 0;
    PendingAsk nodeLocalAsk = application.getPendingAsk(schedulerKey,
        node.getNodeName());
    if (nodeLocalAsk.getCount() > 0) {
      // Don't allocate on this node if we don't need containers on this rack
      if (application.getOutstandingAsksCount(schedulerKey,
          node.getRackName()) <= 0) {
        return 0;
      }

      int assignableContainers = Math.min(
          getMaxAllocatableContainers(application, schedulerKey, node,
              NodeType.NODE_LOCAL), nodeLocalAsk.getCount());
      assignedContainers =
          assignContainer(node, application, schedulerKey,
              assignableContainers,
              nodeLocalAsk.getPerAllocationResource(), NodeType.NODE_LOCAL);
    }
    return assignedContainers;
  }

  private int assignRackLocalContainers(FiCaSchedulerNode node,
      FifoAppAttempt application, SchedulerRequestKey schedulerKey) {
    int assignedContainers = 0;
    PendingAsk rackAsk = application.getPendingAsk(schedulerKey,
        node.getRMNode().getRackName());
    if (rackAsk.getCount() > 0) {
      // Don't allocate on this rack if the application doesn't need containers
      if (application.getOutstandingAsksCount(schedulerKey,
          ResourceRequest.ANY) <= 0) {
        return 0;
      }

      int assignableContainers =
          Math.min(getMaxAllocatableContainers(application, schedulerKey,
              node, NodeType.RACK_LOCAL), rackAsk.getCount());
      assignedContainers =
          assignContainer(node, application, schedulerKey,
              assignableContainers,
              rackAsk.getPerAllocationResource(), NodeType.RACK_LOCAL);
    }
    return assignedContainers;
  }

  private int assignOffSwitchContainers(FiCaSchedulerNode node,
      FifoAppAttempt application, SchedulerRequestKey schedulerKey) {
    int assignedContainers = 0;
    PendingAsk offswitchAsk = application.getPendingAsk(schedulerKey,
        ResourceRequest.ANY);
    if (offswitchAsk.getCount() > 0) {
      assignedContainers =
          assignContainer(node, application, schedulerKey,
              offswitchAsk.getCount(),
              offswitchAsk.getPerAllocationResource(), NodeType.OFF_SWITCH);
    }
    return assignedContainers;
  }

  /**
   * Allocate up to assignableContainers containers of the given capability
   * on the node, bounded by the node's unallocated memory, informing both
   * the application and the node and updating cluster usage.
   *
   * @return number of containers actually assigned
   */
  private int assignContainer(FiCaSchedulerNode node,
      FifoAppAttempt application,
      SchedulerRequestKey schedulerKey, int assignableContainers,
      Resource capability, NodeType type) {
    LOG.debug("assignContainers:" +
        " node=" + node.getRMNode().getNodeAddress() +
        " application=" + application.getApplicationId().getId() +
        " priority=" + schedulerKey.getPriority().getPriority() +
        " assignableContainers=" + assignableContainers +
        " capability=" + capability + " type=" + type);

    // TODO: A buggy application with this zero would crash the scheduler.
    int availableContainers =
        (int) (node.getUnallocatedResource().getMemorySize() /
            capability.getMemorySize());
    int assignedContainers =
        Math.min(assignableContainers, availableContainers);

    if (assignedContainers > 0) {
      for (int i=0; i < assignedContainers; ++i) {

        NodeId nodeId = node.getRMNode().getNodeID();
        ContainerId containerId = BuilderUtils.newContainerId(application
            .getApplicationAttemptId(), application.getNewContainerId());

        // Create the container
        Container container = BuilderUtils.newContainer(containerId, nodeId,
            node.getRMNode().getHttpAddress(), capability,
            schedulerKey.getPriority(), null,
            schedulerKey.getAllocationRequestId());

        // Allocate!

        // Inform the application
        RMContainer rmContainer = application.allocate(type, node,
            schedulerKey, container);

        // Inform the node
        node.allocateContainer(rmContainer);

        // Update usage for this container
        increaseUsedResources(rmContainer);
      }
    }

    return assignedContainers;
  }

  // Add the container's allocation to the cluster-wide used resource total
  private void increaseUsedResources(RMContainer rmContainer) {
    Resources.addTo(usedResource, rmContainer.getAllocatedResource());
  }

  // Headroom for an attempt is whatever the cluster has left over
  private void updateAppHeadRoom(
      SchedulerApplicationAttempt schedulerAttempt) {
    schedulerAttempt.setHeadroom(Resources.subtract(getClusterResource(),
        usedResource));
  }

  private void updateAvailableResourcesMetrics() {
    metrics.setAvailableResourcesToQueue(
        Resources.subtract(getClusterResource(), usedResource));
  }

  /**
   * Dispatch scheduler events (node/app/attempt lifecycle, node updates,
   * container expiry and release) to the corresponding handler methods.
   */
  @Override
  public void handle(SchedulerEvent event) {
    switch(event.getType()) {
    case NODE_ADDED:
    {
      NodeAddedSchedulerEvent nodeAddedEvent = (NodeAddedSchedulerEvent)event;
      addNode(nodeAddedEvent.getAddedRMNode());
      recoverContainersOnNode(nodeAddedEvent.getContainerReports(),
          nodeAddedEvent.getAddedRMNode());
    }
    break;
    case NODE_REMOVED:
    {
      NodeRemovedSchedulerEvent nodeRemovedEvent =
          (NodeRemovedSchedulerEvent)event;
      removeNode(nodeRemovedEvent.getRemovedRMNode());
    }
    break;
    case NODE_RESOURCE_UPDATE:
    {
      NodeResourceUpdateSchedulerEvent nodeResourceUpdatedEvent =
          (NodeResourceUpdateSchedulerEvent)event;
      updateNodeResource(nodeResourceUpdatedEvent.getRMNode(),
          nodeResourceUpdatedEvent.getResourceOption());
    }
    break;
    case NODE_UPDATE:
    {
      NodeUpdateSchedulerEvent nodeUpdatedEvent =
          (NodeUpdateSchedulerEvent)event;
      nodeUpdate(nodeUpdatedEvent.getRMNode());
    }
    break;
    case APP_ADDED:
    {
      AppAddedSchedulerEvent appAddedEvent = (AppAddedSchedulerEvent) event;
      addApplication(appAddedEvent.getApplicationId(),
          appAddedEvent.getQueue(), appAddedEvent.getUser(),
          appAddedEvent.getIsAppRecovering(), appAddedEvent.isUnmanagedAM());
    }
    break;
    case APP_REMOVED:
    {
      AppRemovedSchedulerEvent appRemovedEvent = (AppRemovedSchedulerEvent)event;
      doneApplication(appRemovedEvent.getApplicationID(),
appRemovedEvent.getFinalState());
    }
      break;
    case APP_ATTEMPT_ADDED: {
      AppAttemptAddedSchedulerEvent appAttemptAddedEvent =
          (AppAttemptAddedSchedulerEvent) event;
      addApplicationAttempt(appAttemptAddedEvent.getApplicationAttemptId(),
          appAttemptAddedEvent.getTransferStateFromPreviousAttempt(),
          appAttemptAddedEvent.getIsAttemptRecovering());
    }
      break;
    case APP_ATTEMPT_REMOVED: {
      AppAttemptRemovedSchedulerEvent appAttemptRemovedEvent =
          (AppAttemptRemovedSchedulerEvent) event;
      try {
        doneApplicationAttempt(
            appAttemptRemovedEvent.getApplicationAttemptID(),
            appAttemptRemovedEvent.getFinalAttemptState(),
            appAttemptRemovedEvent.getKeepContainersAcrossAppAttempts());
      } catch (IOException ie) {
        LOG.error("Unable to remove application "
            + appAttemptRemovedEvent.getApplicationAttemptID(), ie);
      }
    }
      break;
    case CONTAINER_EXPIRED: {
      ContainerExpiredSchedulerEvent containerExpiredEvent =
          (ContainerExpiredSchedulerEvent) event;
      ContainerId containerid = containerExpiredEvent.getContainerId();
      super.completedContainer(getRMContainer(containerid),
          SchedulerUtils.createAbnormalContainerStatus(
              containerid, SchedulerUtils.EXPIRED_CONTAINER),
          RMContainerEventType.EXPIRE);
    }
      break;
    case RELEASE_CONTAINER: {
      if (!(event instanceof ReleaseContainerEvent)) {
        throw new RuntimeException("Unexpected event type: " + event);
      }
      RMContainer container = ((ReleaseContainerEvent) event).getContainer();
      completedContainer(container,
          SchedulerUtils.createAbnormalContainerStatus(
              container.getContainerId(), SchedulerUtils.RELEASED_CONTAINER),
          RMContainerEventType.RELEASED);
    }
      break;
    default:
      LOG.error("Invalid eventtype " + event.getType() + ". Ignoring!");
    }
  }

  /**
   * Cleans up scheduler state for a finished container: notifies the owning
   * application attempt and the node, and subtracts the container's resources
   * from the scheduler-wide usage. Only logs (and returns) when the owning
   * application is no longer known.
   */
  @Lock(FifoScheduler.class)
  @Override
  protected synchronized void completedContainerInternal(
      RMContainer rmContainer, ContainerStatus containerStatus,
      RMContainerEventType event) {
    // Get the application for the finished container
    Container container = rmContainer.getContainer();
    FifoAppAttempt application =
        getCurrentAttemptForContainer(container.getId());
    ApplicationId appId =
        container.getId().getApplicationAttemptId().getApplicationId();

    // Get the node on which the container was allocated
    // NOTE(review): node can be null if the node was already removed; the
    // releaseContainer call below would then NPE — verify upstream guarantees.
    FiCaSchedulerNode node =
        (FiCaSchedulerNode) getNode(container.getNodeId());

    if (application == null) {
      LOG.info("Unknown application: " + appId + " released container "
          + container.getId() + " on node: " + node + " with event: " + event);
      return;
    }

    // Inform the application
    application.containerCompleted(rmContainer, containerStatus, event,
        RMNodeLabelsManager.NO_LABEL);

    // Inform the node
    node.releaseContainer(rmContainer.getContainerId(), false);

    // Update total usage
    Resources.subtractFrom(usedResource, container.getResource());

    LOG.info("Application attempt " + application.getApplicationAttemptId()
        + " released container " + container.getId() + " on node: " + node
        + " with event: " + event);
  }

  // Running tally of all resources this scheduler has currently allocated.
  private Resource usedResource =
      recordFactory.newRecordInstance(Resource.class);

  /**
   * Stops tracking a node, first completing (killing) every container still
   * running on it with a LOST_CONTAINER status.
   */
  private synchronized void removeNode(RMNode nodeInfo) {
    FiCaSchedulerNode node = nodeTracker.getNode(nodeInfo.getNodeID());
    if (node == null) {
      return;
    }
    // Kill running containers
    for (RMContainer container : node.getCopiedListOfRunningContainers()) {
      super.completedContainer(container,
          SchedulerUtils.createAbnormalContainerStatus(
              container.getContainerId(), SchedulerUtils.LOST_CONTAINER),
          RMContainerEventType.KILL);
    }
    nodeTracker.removeNode(nodeInfo.getNodeID());
  }

  // Single-queue scheduler: all three parameters are ignored and the default
  // queue's info (without children/apps) is returned.
  @Override
  public QueueInfo getQueueInfo(String queueName, boolean includeChildQueues,
      boolean recursive) {
    return DEFAULT_QUEUE.getQueueInfo(false, false);
  }

  @Override
  public List<QueueUserACLInfo> getQueueUserAclInfo() {
return DEFAULT_QUEUE.getQueueUserAclInfo(null);
  }

  @Override
  public ResourceCalculator getResourceCalculator() {
    return resourceCalculator;
  }

  /** Starts tracking a newly added node. */
  private synchronized void addNode(RMNode nodeManager) {
    FiCaSchedulerNode schedulerNode =
        new FiCaSchedulerNode(nodeManager, usePortForNodeName);
    nodeTracker.addNode(schedulerNode);
  }

  @Override
  public void recover(RMState state) {
    // NOT IMPLEMENTED
  }

  /** Looks up a container via the current attempt that owns it. */
  @Override
  public RMContainer getRMContainer(ContainerId containerId) {
    FifoAppAttempt attempt = getCurrentAttemptForContainer(containerId);
    return (attempt == null) ? null : attempt.getRMContainer(containerId);
  }

  @Override
  public QueueMetrics getRootQueueMetrics() {
    return DEFAULT_QUEUE.getMetrics();
  }

  // Single-queue scheduler: the queueName argument is not consulted.
  @Override
  public synchronized boolean checkAccess(UserGroupInformation callerUGI,
      QueueACL acl, String queueName) {
    return DEFAULT_QUEUE.hasAccess(acl, callerUGI);
  }

  /**
   * Returns the current attempt id of every application when the default
   * queue's name is given; {@code null} for any other queue name.
   */
  @Override
  public synchronized List<ApplicationAttemptId>
      getAppsInQueue(String queueName) {
    if (queueName.equals(DEFAULT_QUEUE.getQueueName())) {
      List<ApplicationAttemptId> attempts =
          new ArrayList<ApplicationAttemptId>(applications.size());
      for (SchedulerApplication<FifoAppAttempt> app : applications.values()) {
        attempts.add(app.getCurrentAppAttempt().getApplicationAttemptId());
      }
      return attempts;
    } else {
      return null;
    }
  }

  public Resource getUsedResource() {
    return usedResource;
  }

  /**
   * Node heartbeat handler: after the superclass bookkeeping, tries to assign
   * new containers on the node — but only once the scheduler is ready during
   * work-preserving recovery, and only if the node has at least the minimum
   * allocation unallocated.
   */
  @Override
  protected synchronized void nodeUpdate(RMNode nm) {
    super.nodeUpdate(nm);

    FiCaSchedulerNode node = (FiCaSchedulerNode) getNode(nm.getNodeID());

    if (rmContext.isWorkPreservingRecoveryEnabled()
        && !rmContext.isSchedulerReadyForAllocatingContainers()) {
      return;
    }

    // A decommissioned node might be removed before we get here
    if (node != null
        && Resources.greaterThanOrEqual(resourceCalculator,
            getClusterResource(), node.getUnallocatedResource(),
            minimumAllocation)) {
      LOG.debug("Node heartbeat " + nm.getNodeID()
          + " available resource = " + node.getUnallocatedResource());

      assignContainers(node);

      LOG.debug("Node after allocation " +
nm.getNodeID() + " resource = " + node.getUnallocatedResource());
    }

    updateAvailableResourcesMetrics();
  }

  /** Kills a container on the RM's behalf (used to simulate AM failures). */
  @VisibleForTesting
  @Override
  public void killContainer(RMContainer container) {
    ContainerStatus status = SchedulerUtils.createKilledContainerStatus(
        container.getContainerId(),
        "Killed by RM to simulate an AM container failure");
    LOG.info("Killing container " + container);
    completedContainer(container, status, RMContainerEventType.KILL);
  }

  /** Delegates container recovery to the base scheduler implementation. */
  @Override
  public synchronized void recoverContainersOnNode(
      List<NMContainerStatus> containerReports, RMNode nm) {
    super.recoverContainersOnNode(containerReports, nm);
  }
}
googleapis/google-cloud-java
37,047
java-aiplatform/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/TensorboardServiceSettings.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.aiplatform.v1beta1; import static com.google.cloud.aiplatform.v1beta1.TensorboardServiceClient.ExportTensorboardTimeSeriesDataPagedResponse; import static com.google.cloud.aiplatform.v1beta1.TensorboardServiceClient.ListLocationsPagedResponse; import static com.google.cloud.aiplatform.v1beta1.TensorboardServiceClient.ListTensorboardExperimentsPagedResponse; import static com.google.cloud.aiplatform.v1beta1.TensorboardServiceClient.ListTensorboardRunsPagedResponse; import static com.google.cloud.aiplatform.v1beta1.TensorboardServiceClient.ListTensorboardTimeSeriesPagedResponse; import static com.google.cloud.aiplatform.v1beta1.TensorboardServiceClient.ListTensorboardsPagedResponse; import com.google.api.core.ApiFunction; import com.google.api.core.BetaApi; import com.google.api.gax.core.GoogleCredentialsProvider; import com.google.api.gax.core.InstantiatingExecutorProvider; import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.ClientSettings; import com.google.api.gax.rpc.OperationCallSettings; import com.google.api.gax.rpc.PagedCallSettings; import com.google.api.gax.rpc.ServerStreamingCallSettings; import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; import 
com.google.cloud.aiplatform.v1beta1.stub.TensorboardServiceStubSettings; import com.google.cloud.location.GetLocationRequest; import com.google.cloud.location.ListLocationsRequest; import com.google.cloud.location.ListLocationsResponse; import com.google.cloud.location.Location; import com.google.iam.v1.GetIamPolicyRequest; import com.google.iam.v1.Policy; import com.google.iam.v1.SetIamPolicyRequest; import com.google.iam.v1.TestIamPermissionsRequest; import com.google.iam.v1.TestIamPermissionsResponse; import com.google.longrunning.Operation; import com.google.protobuf.Empty; import java.io.IOException; import java.util.List; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link TensorboardServiceClient}. * * <p>The default instance has everything set to sensible defaults: * * <ul> * <li>The default service address (aiplatform.googleapis.com) and default port (443) are used. * <li>Credentials are acquired automatically through Application Default Credentials. * <li>Retries are configured for idempotent methods but not for non-idempotent methods. * </ul> * * <p>The builder of this class is recursive, so contained classes are themselves builders. When * build() is called, the tree of builders is called to create the complete settings object. * * <p>For example, to set the * [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings) * of getTensorboard: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * TensorboardServiceSettings.Builder tensorboardServiceSettingsBuilder = * TensorboardServiceSettings.newBuilder(); * tensorboardServiceSettingsBuilder * .getTensorboardSettings() * .setRetrySettings( * tensorboardServiceSettingsBuilder * .getTensorboardSettings() * .getRetrySettings() * .toBuilder() * .setInitialRetryDelayDuration(Duration.ofSeconds(1)) * .setInitialRpcTimeoutDuration(Duration.ofSeconds(5)) * .setMaxAttempts(5) * .setMaxRetryDelayDuration(Duration.ofSeconds(30)) * .setMaxRpcTimeoutDuration(Duration.ofSeconds(60)) * .setRetryDelayMultiplier(1.3) * .setRpcTimeoutMultiplier(1.5) * .setTotalTimeoutDuration(Duration.ofSeconds(300)) * .build()); * TensorboardServiceSettings tensorboardServiceSettings = * tensorboardServiceSettingsBuilder.build(); * }</pre> * * Please refer to the [Client Side Retry * Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for * additional support in setting retries. * * <p>To configure the RetrySettings of a Long Running Operation method, create an * OperationTimedPollAlgorithm object and update the RPC's polling algorithm. For example, to * configure the RetrySettings for createTensorboard: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in
 * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
 * TensorboardServiceSettings.Builder tensorboardServiceSettingsBuilder =
 *     TensorboardServiceSettings.newBuilder();
 * TimedRetryAlgorithm timedRetryAlgorithm =
 *     OperationTimedPollAlgorithm.create(
 *         RetrySettings.newBuilder()
 *             .setInitialRetryDelayDuration(Duration.ofMillis(500))
 *             .setRetryDelayMultiplier(1.5)
 *             .setMaxRetryDelayDuration(Duration.ofMillis(5000))
 *             .setTotalTimeoutDuration(Duration.ofHours(24))
 *             .build());
 * tensorboardServiceSettingsBuilder
 *     .createTensorboardOperationSettings()
 *     .setPollingAlgorithm(timedRetryAlgorithm)
 *     .build();
 * }</pre>
 */
@BetaApi
@Generated("by gapic-generator-java")
public class TensorboardServiceSettings extends ClientSettings<TensorboardServiceSettings> {

  /** Returns the object with the settings used for calls to createTensorboard. */
  public UnaryCallSettings<CreateTensorboardRequest, Operation> createTensorboardSettings() {
    return ((TensorboardServiceStubSettings) getStubSettings()).createTensorboardSettings();
  }

  /** Returns the object with the settings used for calls to createTensorboard. */
  public OperationCallSettings<
          CreateTensorboardRequest, Tensorboard, CreateTensorboardOperationMetadata>
      createTensorboardOperationSettings() {
    return ((TensorboardServiceStubSettings) getStubSettings())
        .createTensorboardOperationSettings();
  }

  /** Returns the object with the settings used for calls to getTensorboard. */
  public UnaryCallSettings<GetTensorboardRequest, Tensorboard> getTensorboardSettings() {
    return ((TensorboardServiceStubSettings) getStubSettings()).getTensorboardSettings();
  }

  /** Returns the object with the settings used for calls to updateTensorboard.
*/ public UnaryCallSettings<UpdateTensorboardRequest, Operation> updateTensorboardSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).updateTensorboardSettings(); } /** Returns the object with the settings used for calls to updateTensorboard. */ public OperationCallSettings< UpdateTensorboardRequest, Tensorboard, UpdateTensorboardOperationMetadata> updateTensorboardOperationSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .updateTensorboardOperationSettings(); } /** Returns the object with the settings used for calls to listTensorboards. */ public PagedCallSettings< ListTensorboardsRequest, ListTensorboardsResponse, ListTensorboardsPagedResponse> listTensorboardsSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).listTensorboardsSettings(); } /** Returns the object with the settings used for calls to deleteTensorboard. */ public UnaryCallSettings<DeleteTensorboardRequest, Operation> deleteTensorboardSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).deleteTensorboardSettings(); } /** Returns the object with the settings used for calls to deleteTensorboard. */ public OperationCallSettings<DeleteTensorboardRequest, Empty, DeleteOperationMetadata> deleteTensorboardOperationSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .deleteTensorboardOperationSettings(); } /** Returns the object with the settings used for calls to readTensorboardUsage. */ public UnaryCallSettings<ReadTensorboardUsageRequest, ReadTensorboardUsageResponse> readTensorboardUsageSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).readTensorboardUsageSettings(); } /** Returns the object with the settings used for calls to readTensorboardSize. 
*/ public UnaryCallSettings<ReadTensorboardSizeRequest, ReadTensorboardSizeResponse> readTensorboardSizeSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).readTensorboardSizeSettings(); } /** Returns the object with the settings used for calls to createTensorboardExperiment. */ public UnaryCallSettings<CreateTensorboardExperimentRequest, TensorboardExperiment> createTensorboardExperimentSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .createTensorboardExperimentSettings(); } /** Returns the object with the settings used for calls to getTensorboardExperiment. */ public UnaryCallSettings<GetTensorboardExperimentRequest, TensorboardExperiment> getTensorboardExperimentSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).getTensorboardExperimentSettings(); } /** Returns the object with the settings used for calls to updateTensorboardExperiment. */ public UnaryCallSettings<UpdateTensorboardExperimentRequest, TensorboardExperiment> updateTensorboardExperimentSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .updateTensorboardExperimentSettings(); } /** Returns the object with the settings used for calls to listTensorboardExperiments. */ public PagedCallSettings< ListTensorboardExperimentsRequest, ListTensorboardExperimentsResponse, ListTensorboardExperimentsPagedResponse> listTensorboardExperimentsSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .listTensorboardExperimentsSettings(); } /** Returns the object with the settings used for calls to deleteTensorboardExperiment. */ public UnaryCallSettings<DeleteTensorboardExperimentRequest, Operation> deleteTensorboardExperimentSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .deleteTensorboardExperimentSettings(); } /** Returns the object with the settings used for calls to deleteTensorboardExperiment. 
*/ public OperationCallSettings<DeleteTensorboardExperimentRequest, Empty, DeleteOperationMetadata> deleteTensorboardExperimentOperationSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .deleteTensorboardExperimentOperationSettings(); } /** Returns the object with the settings used for calls to createTensorboardRun. */ public UnaryCallSettings<CreateTensorboardRunRequest, TensorboardRun> createTensorboardRunSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).createTensorboardRunSettings(); } /** Returns the object with the settings used for calls to batchCreateTensorboardRuns. */ public UnaryCallSettings<BatchCreateTensorboardRunsRequest, BatchCreateTensorboardRunsResponse> batchCreateTensorboardRunsSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .batchCreateTensorboardRunsSettings(); } /** Returns the object with the settings used for calls to getTensorboardRun. */ public UnaryCallSettings<GetTensorboardRunRequest, TensorboardRun> getTensorboardRunSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).getTensorboardRunSettings(); } /** Returns the object with the settings used for calls to updateTensorboardRun. */ public UnaryCallSettings<UpdateTensorboardRunRequest, TensorboardRun> updateTensorboardRunSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).updateTensorboardRunSettings(); } /** Returns the object with the settings used for calls to listTensorboardRuns. */ public PagedCallSettings< ListTensorboardRunsRequest, ListTensorboardRunsResponse, ListTensorboardRunsPagedResponse> listTensorboardRunsSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).listTensorboardRunsSettings(); } /** Returns the object with the settings used for calls to deleteTensorboardRun. 
*/ public UnaryCallSettings<DeleteTensorboardRunRequest, Operation> deleteTensorboardRunSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).deleteTensorboardRunSettings(); } /** Returns the object with the settings used for calls to deleteTensorboardRun. */ public OperationCallSettings<DeleteTensorboardRunRequest, Empty, DeleteOperationMetadata> deleteTensorboardRunOperationSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .deleteTensorboardRunOperationSettings(); } /** Returns the object with the settings used for calls to batchCreateTensorboardTimeSeries. */ public UnaryCallSettings< BatchCreateTensorboardTimeSeriesRequest, BatchCreateTensorboardTimeSeriesResponse> batchCreateTensorboardTimeSeriesSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .batchCreateTensorboardTimeSeriesSettings(); } /** Returns the object with the settings used for calls to createTensorboardTimeSeries. */ public UnaryCallSettings<CreateTensorboardTimeSeriesRequest, TensorboardTimeSeries> createTensorboardTimeSeriesSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .createTensorboardTimeSeriesSettings(); } /** Returns the object with the settings used for calls to getTensorboardTimeSeries. */ public UnaryCallSettings<GetTensorboardTimeSeriesRequest, TensorboardTimeSeries> getTensorboardTimeSeriesSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).getTensorboardTimeSeriesSettings(); } /** Returns the object with the settings used for calls to updateTensorboardTimeSeries. */ public UnaryCallSettings<UpdateTensorboardTimeSeriesRequest, TensorboardTimeSeries> updateTensorboardTimeSeriesSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .updateTensorboardTimeSeriesSettings(); } /** Returns the object with the settings used for calls to listTensorboardTimeSeries. 
*/ public PagedCallSettings< ListTensorboardTimeSeriesRequest, ListTensorboardTimeSeriesResponse, ListTensorboardTimeSeriesPagedResponse> listTensorboardTimeSeriesSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).listTensorboardTimeSeriesSettings(); } /** Returns the object with the settings used for calls to deleteTensorboardTimeSeries. */ public UnaryCallSettings<DeleteTensorboardTimeSeriesRequest, Operation> deleteTensorboardTimeSeriesSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .deleteTensorboardTimeSeriesSettings(); } /** Returns the object with the settings used for calls to deleteTensorboardTimeSeries. */ public OperationCallSettings<DeleteTensorboardTimeSeriesRequest, Empty, DeleteOperationMetadata> deleteTensorboardTimeSeriesOperationSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .deleteTensorboardTimeSeriesOperationSettings(); } /** Returns the object with the settings used for calls to batchReadTensorboardTimeSeriesData. */ public UnaryCallSettings< BatchReadTensorboardTimeSeriesDataRequest, BatchReadTensorboardTimeSeriesDataResponse> batchReadTensorboardTimeSeriesDataSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .batchReadTensorboardTimeSeriesDataSettings(); } /** Returns the object with the settings used for calls to readTensorboardTimeSeriesData. */ public UnaryCallSettings< ReadTensorboardTimeSeriesDataRequest, ReadTensorboardTimeSeriesDataResponse> readTensorboardTimeSeriesDataSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .readTensorboardTimeSeriesDataSettings(); } /** Returns the object with the settings used for calls to readTensorboardBlobData. 
*/ public ServerStreamingCallSettings< ReadTensorboardBlobDataRequest, ReadTensorboardBlobDataResponse> readTensorboardBlobDataSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).readTensorboardBlobDataSettings(); } /** Returns the object with the settings used for calls to writeTensorboardExperimentData. */ public UnaryCallSettings< WriteTensorboardExperimentDataRequest, WriteTensorboardExperimentDataResponse> writeTensorboardExperimentDataSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .writeTensorboardExperimentDataSettings(); } /** Returns the object with the settings used for calls to writeTensorboardRunData. */ public UnaryCallSettings<WriteTensorboardRunDataRequest, WriteTensorboardRunDataResponse> writeTensorboardRunDataSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).writeTensorboardRunDataSettings(); } /** Returns the object with the settings used for calls to exportTensorboardTimeSeriesData. */ public PagedCallSettings< ExportTensorboardTimeSeriesDataRequest, ExportTensorboardTimeSeriesDataResponse, ExportTensorboardTimeSeriesDataPagedResponse> exportTensorboardTimeSeriesDataSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .exportTensorboardTimeSeriesDataSettings(); } /** Returns the object with the settings used for calls to listLocations. */ public PagedCallSettings<ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse> listLocationsSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).listLocationsSettings(); } /** Returns the object with the settings used for calls to getLocation. */ public UnaryCallSettings<GetLocationRequest, Location> getLocationSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).getLocationSettings(); } /** Returns the object with the settings used for calls to setIamPolicy. 
*/ public UnaryCallSettings<SetIamPolicyRequest, Policy> setIamPolicySettings() { return ((TensorboardServiceStubSettings) getStubSettings()).setIamPolicySettings(); } /** Returns the object with the settings used for calls to getIamPolicy. */ public UnaryCallSettings<GetIamPolicyRequest, Policy> getIamPolicySettings() { return ((TensorboardServiceStubSettings) getStubSettings()).getIamPolicySettings(); } /** Returns the object with the settings used for calls to testIamPermissions. */ public UnaryCallSettings<TestIamPermissionsRequest, TestIamPermissionsResponse> testIamPermissionsSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).testIamPermissionsSettings(); } public static final TensorboardServiceSettings create(TensorboardServiceStubSettings stub) throws IOException { return new TensorboardServiceSettings.Builder(stub.toBuilder()).build(); } /** Returns a builder for the default ExecutorProvider for this service. */ public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() { return TensorboardServiceStubSettings.defaultExecutorProviderBuilder(); } /** Returns the default service endpoint. */ public static String getDefaultEndpoint() { return TensorboardServiceStubSettings.getDefaultEndpoint(); } /** Returns the default service scopes. */ public static List<String> getDefaultServiceScopes() { return TensorboardServiceStubSettings.getDefaultServiceScopes(); } /** Returns a builder for the default credentials for this service. */ public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() { return TensorboardServiceStubSettings.defaultCredentialsProviderBuilder(); } /** Returns a builder for the default ChannelProvider for this service. 
*/
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return TensorboardServiceStubSettings.defaultGrpcTransportProviderBuilder();
  }

  /** Returns the default transport channel provider, delegating to the stub settings. */
  public static TransportChannelProvider defaultTransportChannelProvider() {
    return TensorboardServiceStubSettings.defaultTransportChannelProvider();
  }

  /** Returns the default API client header provider builder, delegating to the stub settings. */
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return TensorboardServiceStubSettings.defaultApiClientHeaderProviderBuilder();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  // Settings instances are only constructed from a fully-populated Builder; IOException
  // propagates from the superclass if the underlying stub settings cannot be built.
  protected TensorboardServiceSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);
  }

  /** Builder for TensorboardServiceSettings. */
  public static class Builder extends ClientSettings.Builder<TensorboardServiceSettings, Builder> {

    protected Builder() throws IOException {
      // The cast disambiguates between the overloaded single-argument constructors below.
      this(((ClientContext) null));
    }

    protected Builder(ClientContext clientContext) {
      super(TensorboardServiceStubSettings.newBuilder(clientContext));
    }

    // Copy constructor backing toBuilder(): snapshots the settings' stub configuration.
    protected Builder(TensorboardServiceSettings settings) {
      super(settings.getStubSettings().toBuilder());
    }

    protected Builder(TensorboardServiceStubSettings.Builder stubSettings) {
      super(stubSettings);
    }

    private static Builder createDefault() {
      return new Builder(TensorboardServiceStubSettings.newBuilder());
    }

    /**
     * Returns the underlying stub settings builder. Every per-method accessor in this class is a
     * thin delegation to this builder.
     */
    public TensorboardServiceStubSettings.Builder getStubSettingsBuilder() {
      return ((TensorboardServiceStubSettings.Builder) getStubSettings());
    }

    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(
          getStubSettingsBuilder().unaryMethodSettingsBuilders(), settingsUpdater);
      return this;
    }

    /** Returns the builder for the settings used for calls to createTensorboard. */
    public UnaryCallSettings.Builder<CreateTensorboardRequest, Operation>
        createTensorboardSettings() {
      return getStubSettingsBuilder().createTensorboardSettings();
    }

    /** Returns the builder for the settings used for calls to createTensorboard. */
    public OperationCallSettings.Builder<
            CreateTensorboardRequest, Tensorboard, CreateTensorboardOperationMetadata>
        createTensorboardOperationSettings() {
      return getStubSettingsBuilder().createTensorboardOperationSettings();
    }

    /** Returns the builder for the settings used for calls to getTensorboard. */
    public UnaryCallSettings.Builder<GetTensorboardRequest, Tensorboard> getTensorboardSettings() {
      return getStubSettingsBuilder().getTensorboardSettings();
    }

    /** Returns the builder for the settings used for calls to updateTensorboard. */
    public UnaryCallSettings.Builder<UpdateTensorboardRequest, Operation>
        updateTensorboardSettings() {
      return getStubSettingsBuilder().updateTensorboardSettings();
    }

    /** Returns the builder for the settings used for calls to updateTensorboard. */
    public OperationCallSettings.Builder<
            UpdateTensorboardRequest, Tensorboard, UpdateTensorboardOperationMetadata>
        updateTensorboardOperationSettings() {
      return getStubSettingsBuilder().updateTensorboardOperationSettings();
    }

    /** Returns the builder for the settings used for calls to listTensorboards. */
    public PagedCallSettings.Builder<
            ListTensorboardsRequest, ListTensorboardsResponse, ListTensorboardsPagedResponse>
        listTensorboardsSettings() {
      return getStubSettingsBuilder().listTensorboardsSettings();
    }

    /** Returns the builder for the settings used for calls to deleteTensorboard. */
    public UnaryCallSettings.Builder<DeleteTensorboardRequest, Operation>
        deleteTensorboardSettings() {
      return getStubSettingsBuilder().deleteTensorboardSettings();
    }

    /** Returns the builder for the settings used for calls to deleteTensorboard. */
    public OperationCallSettings.Builder<DeleteTensorboardRequest, Empty, DeleteOperationMetadata>
        deleteTensorboardOperationSettings() {
      return getStubSettingsBuilder().deleteTensorboardOperationSettings();
    }

    /** Returns the builder for the settings used for calls to readTensorboardUsage. */
    public UnaryCallSettings.Builder<ReadTensorboardUsageRequest, ReadTensorboardUsageResponse>
        readTensorboardUsageSettings() {
      return getStubSettingsBuilder().readTensorboardUsageSettings();
    }

    /** Returns the builder for the settings used for calls to readTensorboardSize. */
    public UnaryCallSettings.Builder<ReadTensorboardSizeRequest, ReadTensorboardSizeResponse>
        readTensorboardSizeSettings() {
      return getStubSettingsBuilder().readTensorboardSizeSettings();
    }

    /** Returns the builder for the settings used for calls to createTensorboardExperiment. */
    public UnaryCallSettings.Builder<CreateTensorboardExperimentRequest, TensorboardExperiment>
        createTensorboardExperimentSettings() {
      return getStubSettingsBuilder().createTensorboardExperimentSettings();
    }

    /** Returns the builder for the settings used for calls to getTensorboardExperiment. */
    public UnaryCallSettings.Builder<GetTensorboardExperimentRequest, TensorboardExperiment>
        getTensorboardExperimentSettings() {
      return getStubSettingsBuilder().getTensorboardExperimentSettings();
    }

    /** Returns the builder for the settings used for calls to updateTensorboardExperiment. */
    public UnaryCallSettings.Builder<UpdateTensorboardExperimentRequest, TensorboardExperiment>
        updateTensorboardExperimentSettings() {
      return getStubSettingsBuilder().updateTensorboardExperimentSettings();
    }

    /** Returns the builder for the settings used for calls to listTensorboardExperiments. */
    public PagedCallSettings.Builder<
            ListTensorboardExperimentsRequest,
            ListTensorboardExperimentsResponse,
            ListTensorboardExperimentsPagedResponse>
        listTensorboardExperimentsSettings() {
      return getStubSettingsBuilder().listTensorboardExperimentsSettings();
    }

    /** Returns the builder for the settings used for calls to deleteTensorboardExperiment. */
    public UnaryCallSettings.Builder<DeleteTensorboardExperimentRequest, Operation>
        deleteTensorboardExperimentSettings() {
      return getStubSettingsBuilder().deleteTensorboardExperimentSettings();
    }

    /** Returns the builder for the settings used for calls to deleteTensorboardExperiment. */
    public OperationCallSettings.Builder<
            DeleteTensorboardExperimentRequest, Empty, DeleteOperationMetadata>
        deleteTensorboardExperimentOperationSettings() {
      return getStubSettingsBuilder().deleteTensorboardExperimentOperationSettings();
    }

    /** Returns the builder for the settings used for calls to createTensorboardRun. */
    public UnaryCallSettings.Builder<CreateTensorboardRunRequest, TensorboardRun>
        createTensorboardRunSettings() {
      return getStubSettingsBuilder().createTensorboardRunSettings();
    }

    /** Returns the builder for the settings used for calls to batchCreateTensorboardRuns. */
    public UnaryCallSettings.Builder<
            BatchCreateTensorboardRunsRequest, BatchCreateTensorboardRunsResponse>
        batchCreateTensorboardRunsSettings() {
      return getStubSettingsBuilder().batchCreateTensorboardRunsSettings();
    }

    /** Returns the builder for the settings used for calls to getTensorboardRun. */
    public UnaryCallSettings.Builder<GetTensorboardRunRequest, TensorboardRun>
        getTensorboardRunSettings() {
      return getStubSettingsBuilder().getTensorboardRunSettings();
    }

    /** Returns the builder for the settings used for calls to updateTensorboardRun. */
    public UnaryCallSettings.Builder<UpdateTensorboardRunRequest, TensorboardRun>
        updateTensorboardRunSettings() {
      return getStubSettingsBuilder().updateTensorboardRunSettings();
    }

    /** Returns the builder for the settings used for calls to listTensorboardRuns. */
    public PagedCallSettings.Builder<
            ListTensorboardRunsRequest, ListTensorboardRunsResponse,
            ListTensorboardRunsPagedResponse>
        listTensorboardRunsSettings() {
      return getStubSettingsBuilder().listTensorboardRunsSettings();
    }

    /** Returns the builder for the settings used for calls to deleteTensorboardRun. */
    public UnaryCallSettings.Builder<DeleteTensorboardRunRequest, Operation>
        deleteTensorboardRunSettings() {
      return getStubSettingsBuilder().deleteTensorboardRunSettings();
    }

    /** Returns the builder for the settings used for calls to deleteTensorboardRun. */
    public OperationCallSettings.Builder<
            DeleteTensorboardRunRequest, Empty, DeleteOperationMetadata>
        deleteTensorboardRunOperationSettings() {
      return getStubSettingsBuilder().deleteTensorboardRunOperationSettings();
    }

    /** Returns the builder for the settings used for calls to batchCreateTensorboardTimeSeries. */
    public UnaryCallSettings.Builder<
            BatchCreateTensorboardTimeSeriesRequest, BatchCreateTensorboardTimeSeriesResponse>
        batchCreateTensorboardTimeSeriesSettings() {
      return getStubSettingsBuilder().batchCreateTensorboardTimeSeriesSettings();
    }

    /** Returns the builder for the settings used for calls to createTensorboardTimeSeries. */
    public UnaryCallSettings.Builder<CreateTensorboardTimeSeriesRequest, TensorboardTimeSeries>
        createTensorboardTimeSeriesSettings() {
      return getStubSettingsBuilder().createTensorboardTimeSeriesSettings();
    }

    /** Returns the builder for the settings used for calls to getTensorboardTimeSeries. */
    public UnaryCallSettings.Builder<GetTensorboardTimeSeriesRequest, TensorboardTimeSeries>
        getTensorboardTimeSeriesSettings() {
      return getStubSettingsBuilder().getTensorboardTimeSeriesSettings();
    }

    /** Returns the builder for the settings used for calls to updateTensorboardTimeSeries. */
    public UnaryCallSettings.Builder<UpdateTensorboardTimeSeriesRequest, TensorboardTimeSeries>
        updateTensorboardTimeSeriesSettings() {
      return getStubSettingsBuilder().updateTensorboardTimeSeriesSettings();
    }

    /** Returns the builder for the settings used for calls to listTensorboardTimeSeries. */
    public PagedCallSettings.Builder<
            ListTensorboardTimeSeriesRequest,
            ListTensorboardTimeSeriesResponse,
            ListTensorboardTimeSeriesPagedResponse>
        listTensorboardTimeSeriesSettings() {
      return getStubSettingsBuilder().listTensorboardTimeSeriesSettings();
    }

    /** Returns the builder for the settings used for calls to deleteTensorboardTimeSeries. */
    public UnaryCallSettings.Builder<DeleteTensorboardTimeSeriesRequest, Operation>
        deleteTensorboardTimeSeriesSettings() {
      return getStubSettingsBuilder().deleteTensorboardTimeSeriesSettings();
    }

    /** Returns the builder for the settings used for calls to deleteTensorboardTimeSeries. */
    public OperationCallSettings.Builder<
            DeleteTensorboardTimeSeriesRequest, Empty, DeleteOperationMetadata>
        deleteTensorboardTimeSeriesOperationSettings() {
      return getStubSettingsBuilder().deleteTensorboardTimeSeriesOperationSettings();
    }

    /**
     * Returns the builder for the settings used for calls to batchReadTensorboardTimeSeriesData.
     */
    public UnaryCallSettings.Builder<
            BatchReadTensorboardTimeSeriesDataRequest, BatchReadTensorboardTimeSeriesDataResponse>
        batchReadTensorboardTimeSeriesDataSettings() {
      return getStubSettingsBuilder().batchReadTensorboardTimeSeriesDataSettings();
    }

    /** Returns the builder for the settings used for calls to readTensorboardTimeSeriesData. */
    public UnaryCallSettings.Builder<
            ReadTensorboardTimeSeriesDataRequest, ReadTensorboardTimeSeriesDataResponse>
        readTensorboardTimeSeriesDataSettings() {
      return getStubSettingsBuilder().readTensorboardTimeSeriesDataSettings();
    }

    /** Returns the builder for the settings used for calls to readTensorboardBlobData. */
    public ServerStreamingCallSettings.Builder<
            ReadTensorboardBlobDataRequest, ReadTensorboardBlobDataResponse>
        readTensorboardBlobDataSettings() {
      return getStubSettingsBuilder().readTensorboardBlobDataSettings();
    }

    /** Returns the builder for the settings used for calls to writeTensorboardExperimentData. */
    public UnaryCallSettings.Builder<
            WriteTensorboardExperimentDataRequest, WriteTensorboardExperimentDataResponse>
        writeTensorboardExperimentDataSettings() {
      return getStubSettingsBuilder().writeTensorboardExperimentDataSettings();
    }

    /** Returns the builder for the settings used for calls to writeTensorboardRunData. */
    public UnaryCallSettings.Builder<
            WriteTensorboardRunDataRequest, WriteTensorboardRunDataResponse>
        writeTensorboardRunDataSettings() {
      return getStubSettingsBuilder().writeTensorboardRunDataSettings();
    }

    /** Returns the builder for the settings used for calls to exportTensorboardTimeSeriesData. */
    public PagedCallSettings.Builder<
            ExportTensorboardTimeSeriesDataRequest,
            ExportTensorboardTimeSeriesDataResponse,
            ExportTensorboardTimeSeriesDataPagedResponse>
        exportTensorboardTimeSeriesDataSettings() {
      return getStubSettingsBuilder().exportTensorboardTimeSeriesDataSettings();
    }

    /** Returns the builder for the settings used for calls to listLocations. */
    public PagedCallSettings.Builder<
            ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
        listLocationsSettings() {
      return getStubSettingsBuilder().listLocationsSettings();
    }

    /** Returns the builder for the settings used for calls to getLocation. */
    public UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings() {
      return getStubSettingsBuilder().getLocationSettings();
    }

    /** Returns the builder for the settings used for calls to setIamPolicy. */
    public UnaryCallSettings.Builder<SetIamPolicyRequest, Policy> setIamPolicySettings() {
      return getStubSettingsBuilder().setIamPolicySettings();
    }

    /** Returns the builder for the settings used for calls to getIamPolicy. */
    public UnaryCallSettings.Builder<GetIamPolicyRequest, Policy> getIamPolicySettings() {
      return getStubSettingsBuilder().getIamPolicySettings();
    }

    /** Returns the builder for the settings used for calls to testIamPermissions. */
    public UnaryCallSettings.Builder<TestIamPermissionsRequest, TestIamPermissionsResponse>
        testIamPermissionsSettings() {
      return getStubSettingsBuilder().testIamPermissionsSettings();
    }

    @Override
    public TensorboardServiceSettings build() throws IOException {
      return new TensorboardServiceSettings(this);
    }
  }
}
google/j2objc
37,237
jre_emul/android/platform/libcore/support/src/test/java/org/apache/harmony/security/tests/support/TestKeyPair.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * @author Vladimir N. Molotkov * @version $Revision$ */ package org.apache.harmony.security.tests.support; import java.security.KeyFactory; import java.security.NoSuchAlgorithmException; import java.security.PrivateKey; import java.security.PublicKey; import java.security.spec.InvalidKeySpecException; import java.security.spec.PKCS8EncodedKeySpec; import java.security.spec.X509EncodedKeySpec; import java.util.HashMap; /** * Generates key pairs based on their encodings for some algorithms. 
* Encodings generated using * BEA JRockit j2sdk1.4.2_04 (http://www.bea.com) */ public class TestKeyPair { private static final HashMap<String, byte []> privateKeyEncoding = new HashMap<String, byte[]>(); private static final HashMap<String, byte []> publicKeyEncoding = new HashMap<String, byte[]>(); private final String algorithmName; private final KeyFactory kf; static { privateKeyEncoding.put("RSA", new byte[] { (byte)0x30, (byte)0x82, (byte)0x02, (byte)0x77, (byte)0x02, (byte)0x01, (byte)0x00, (byte)0x30, (byte)0x0d, (byte)0x06, (byte)0x09, (byte)0x2a, (byte)0x86, (byte)0x48, (byte)0x86, (byte)0xf7, (byte)0x0d, (byte)0x01, (byte)0x01, (byte)0x01, (byte)0x05, (byte)0x00, (byte)0x04, (byte)0x82, (byte)0x02, (byte)0x61, (byte)0x30, (byte)0x82, (byte)0x02, (byte)0x5d, (byte)0x02, (byte)0x01, (byte)0x00, (byte)0x02, (byte)0x81, (byte)0x81, (byte)0x00, (byte)0xb2, (byte)0x4a, (byte)0x9b, (byte)0x5b, (byte)0xba, (byte)0x01, (byte)0xc0, (byte)0xcd, (byte)0x65, (byte)0x09, (byte)0x63, (byte)0x70, (byte)0x0b, (byte)0x5a, (byte)0x1b, (byte)0x92, (byte)0x08, (byte)0xf8, (byte)0x55, (byte)0x5e, (byte)0x7c, (byte)0x1b, (byte)0x50, (byte)0x17, (byte)0xec, (byte)0x44, (byte)0x4c, (byte)0x58, (byte)0x42, (byte)0x2b, (byte)0x41, (byte)0x09, (byte)0x59, (byte)0xf2, (byte)0xe1, (byte)0x5d, (byte)0x43, (byte)0x71, (byte)0x4d, (byte)0x92, (byte)0x03, (byte)0x1d, (byte)0xb6, (byte)0x6c, (byte)0x7f, (byte)0x5d, (byte)0x48, (byte)0xcd, (byte)0x17, (byte)0xec, (byte)0xd7, (byte)0x4c, (byte)0x39, (byte)0xb1, (byte)0x7b, (byte)0xe2, (byte)0xbf, (byte)0x96, (byte)0x77, (byte)0xbe, (byte)0xd0, (byte)0xa0, (byte)0xf0, (byte)0x2d, (byte)0x6b, (byte)0x24, (byte)0xaa, (byte)0x14, (byte)0xba, (byte)0x82, (byte)0x79, (byte)0x10, (byte)0x9b, (byte)0x16, (byte)0x68, (byte)0x47, (byte)0x81, (byte)0x54, (byte)0xa2, (byte)0xfa, (byte)0x91, (byte)0x9e, (byte)0x0a, (byte)0x2a, (byte)0x53, (byte)0xa6, (byte)0xe7, (byte)0x9e, (byte)0x7d, (byte)0x29, (byte)0x33, (byte)0xd8, (byte)0x05, (byte)0xfc, 
(byte)0x02, (byte)0x3f, (byte)0xbd, (byte)0xc7, (byte)0x6e, (byte)0xed, (byte)0xaa, (byte)0x30, (byte)0x6c, (byte)0x5f, (byte)0x52, (byte)0xed, (byte)0x35, (byte)0x65, (byte)0x4b, (byte)0x0e, (byte)0xc8, (byte)0xa7, (byte)0x12, (byte)0x10, (byte)0x56, (byte)0x37, (byte)0xaf, (byte)0x11, (byte)0xfa, (byte)0x21, (byte)0x0e, (byte)0x99, (byte)0xff, (byte)0xfa, (byte)0x8c, (byte)0x65, (byte)0x8e, (byte)0x6d, (byte)0x02, (byte)0x03, (byte)0x01, (byte)0x00, (byte)0x01, (byte)0x02, (byte)0x81, (byte)0x80, (byte)0x78, (byte)0x41, (byte)0x72, (byte)0x40, (byte)0x90, (byte)0x59, (byte)0x96, (byte)0x5d, (byte)0xf3, (byte)0x84, (byte)0x3d, (byte)0x99, (byte)0xd9, (byte)0x4e, (byte)0x51, (byte)0xc2, (byte)0x52, (byte)0x62, (byte)0x8d, (byte)0xd2, (byte)0x49, (byte)0x0b, (byte)0x73, (byte)0x1e, (byte)0x6f, (byte)0xb2, (byte)0x31, (byte)0x7c, (byte)0x66, (byte)0x45, (byte)0x1e, (byte)0x7c, (byte)0xdc, (byte)0x3a, (byte)0xc2, (byte)0x5f, (byte)0x51, (byte)0x9a, (byte)0x1e, (byte)0xa4, (byte)0x19, (byte)0x8d, (byte)0xf4, (byte)0xf9, (byte)0x81, (byte)0x7e, (byte)0xbe, (byte)0x17, (byte)0xf7, (byte)0xc7, (byte)0x3c, (byte)0x00, (byte)0xa1, (byte)0xf9, (byte)0x60, (byte)0x82, (byte)0x34, (byte)0x8f, (byte)0x9c, (byte)0xfd, (byte)0x0b, (byte)0x63, (byte)0x42, (byte)0x1b, (byte)0x7f, (byte)0x45, (byte)0xf1, (byte)0x31, (byte)0xc3, (byte)0x63, (byte)0x47, (byte)0x5c, (byte)0xc1, (byte)0xb2, (byte)0x5f, (byte)0x57, (byte)0xee, (byte)0x02, (byte)0x9f, (byte)0x5e, (byte)0x08, (byte)0x48, (byte)0xba, (byte)0x74, (byte)0xba, (byte)0x81, (byte)0xb7, (byte)0x30, (byte)0xac, (byte)0x4c, (byte)0x01, (byte)0x35, (byte)0xce, (byte)0x46, (byte)0x47, (byte)0x8c, (byte)0xe4, (byte)0x62, (byte)0x36, (byte)0x1a, (byte)0x65, (byte)0x0e, (byte)0x33, (byte)0x56, (byte)0xf9, (byte)0xb7, (byte)0xa0, (byte)0xc4, (byte)0xb6, (byte)0x82, (byte)0x55, (byte)0x7d, (byte)0x36, (byte)0x55, (byte)0xc0, (byte)0x52, (byte)0x5e, (byte)0x35, (byte)0x54, (byte)0xbd, (byte)0x97, (byte)0x01, (byte)0x00, (byte)0xbf, 
(byte)0x10, (byte)0xdc, (byte)0x1b, (byte)0x51, (byte)0x02, (byte)0x41, (byte)0x00, (byte)0xe7, (byte)0x68, (byte)0x03, (byte)0x3e, (byte)0x21, (byte)0x64, (byte)0x68, (byte)0x24, (byte)0x7b, (byte)0xd0, (byte)0x31, (byte)0xa0, (byte)0xa2, (byte)0xd9, (byte)0x87, (byte)0x6d, (byte)0x79, (byte)0x81, (byte)0x8f, (byte)0x8f, (byte)0x2d, (byte)0x7a, (byte)0x95, (byte)0x2e, (byte)0x55, (byte)0x9f, (byte)0xd7, (byte)0x86, (byte)0x29, (byte)0x93, (byte)0xbd, (byte)0x04, (byte)0x7e, (byte)0x4f, (byte)0xdb, (byte)0x56, (byte)0xf1, (byte)0x75, (byte)0xd0, (byte)0x4b, (byte)0x00, (byte)0x3a, (byte)0xe0, (byte)0x26, (byte)0xf6, (byte)0xab, (byte)0x9e, (byte)0x0b, (byte)0x2a, (byte)0xf4, (byte)0xa8, (byte)0xd7, (byte)0xff, (byte)0xbe, (byte)0x01, (byte)0xeb, (byte)0x9b, (byte)0x81, (byte)0xc7, (byte)0x5f, (byte)0x02, (byte)0x73, (byte)0xe1, (byte)0x2b, (byte)0x02, (byte)0x41, (byte)0x00, (byte)0xc5, (byte)0x3d, (byte)0x78, (byte)0xab, (byte)0xe6, (byte)0xab, (byte)0x3e, (byte)0x29, (byte)0xfd, (byte)0x98, (byte)0xd0, (byte)0xa4, (byte)0x3e, (byte)0x58, (byte)0xee, (byte)0x48, (byte)0x45, (byte)0xa3, (byte)0x66, (byte)0xac, (byte)0xe9, (byte)0x4d, (byte)0xbd, (byte)0x60, (byte)0xea, (byte)0x24, (byte)0xff, (byte)0xed, (byte)0x0c, (byte)0x67, (byte)0xc5, (byte)0xfd, (byte)0x36, (byte)0x28, (byte)0xea, (byte)0x74, (byte)0x88, (byte)0xd1, (byte)0xd1, (byte)0xad, (byte)0x58, (byte)0xd7, (byte)0xf0, (byte)0x67, (byte)0x20, (byte)0xc1, (byte)0xe3, (byte)0xb3, (byte)0xdb, (byte)0x52, (byte)0xad, (byte)0xf3, (byte)0xc4, (byte)0x21, (byte)0xd8, (byte)0x8c, (byte)0x4c, (byte)0x41, (byte)0x27, (byte)0xdb, (byte)0xd0, (byte)0x35, (byte)0x92, (byte)0xc7, (byte)0x02, (byte)0x41, (byte)0x00, (byte)0xe0, (byte)0x99, (byte)0x42, (byte)0xb4, (byte)0x76, (byte)0x02, (byte)0x97, (byte)0x55, (byte)0xf9, (byte)0xda, (byte)0x3b, (byte)0xa0, (byte)0xd7, (byte)0x0e, (byte)0xdc, (byte)0xf4, (byte)0x33, (byte)0x7f, (byte)0xbd, (byte)0xcf, (byte)0xd0, (byte)0xeb, (byte)0x6e, (byte)0x89, (byte)0xf7, 
(byte)0x4f, (byte)0x5a, (byte)0x07, (byte)0x7c, (byte)0xa9, (byte)0x49, (byte)0x47, (byte)0x68, (byte)0x35, (byte)0xa8, (byte)0x05, (byte)0x3d, (byte)0xfd, (byte)0x04, (byte)0x7b, (byte)0x17, (byte)0x31, (byte)0x0d, (byte)0xc8, (byte)0xa3, (byte)0x98, (byte)0x34, (byte)0xa0, (byte)0x50, (byte)0x44, (byte)0x00, (byte)0xf1, (byte)0x0c, (byte)0xe6, (byte)0xe5, (byte)0xc4, (byte)0x41, (byte)0x3d, (byte)0xf8, (byte)0x3d, (byte)0x4e, (byte)0x0b, (byte)0x1c, (byte)0xdb, (byte)0x02, (byte)0x41, (byte)0x00, (byte)0x82, (byte)0x9b, (byte)0x8a, (byte)0xfd, (byte)0xa1, (byte)0x98, (byte)0x41, (byte)0x68, (byte)0xc2, (byte)0xd1, (byte)0xdf, (byte)0x4e, (byte)0xf3, (byte)0x2e, (byte)0x26, (byte)0x53, (byte)0x5b, (byte)0x31, (byte)0xb1, (byte)0x7a, (byte)0xcc, (byte)0x5e, (byte)0xbb, (byte)0x09, (byte)0xa2, (byte)0xe2, (byte)0x6f, (byte)0x4a, (byte)0x04, (byte)0x0d, (byte)0xef, (byte)0x90, (byte)0x15, (byte)0xbe, (byte)0x10, (byte)0x4a, (byte)0xac, (byte)0x92, (byte)0xeb, (byte)0xda, (byte)0x72, (byte)0xdb, (byte)0x43, (byte)0x08, (byte)0xb7, (byte)0x2b, (byte)0x4c, (byte)0xe1, (byte)0xbb, (byte)0x58, (byte)0xcb, (byte)0x71, (byte)0x80, (byte)0xad, (byte)0xbc, (byte)0xdc, (byte)0x62, (byte)0x5e, (byte)0x3e, (byte)0xcb, (byte)0x92, (byte)0xda, (byte)0xf6, (byte)0xdf, (byte)0x02, (byte)0x40, (byte)0x4d, (byte)0x81, (byte)0x90, (byte)0xc5, (byte)0x77, (byte)0x30, (byte)0xb7, (byte)0x29, (byte)0x00, (byte)0xa8, (byte)0xf1, (byte)0xb4, (byte)0xae, (byte)0x52, (byte)0x63, (byte)0x00, (byte)0xb2, (byte)0x2d, (byte)0x3e, (byte)0x7d, (byte)0xd6, (byte)0x4d, (byte)0xf9, (byte)0x8a, (byte)0xc1, (byte)0xb1, (byte)0x98, (byte)0x89, (byte)0x52, (byte)0x40, (byte)0x14, (byte)0x1b, (byte)0x0e, (byte)0x61, (byte)0x8f, (byte)0xf4, (byte)0xbe, (byte)0x59, (byte)0x79, (byte)0x79, (byte)0x95, (byte)0x19, (byte)0x5c, (byte)0x51, (byte)0x08, (byte)0x66, (byte)0xc1, (byte)0x42, (byte)0x30, (byte)0xb3, (byte)0x7a, (byte)0x86, (byte)0x9f, (byte)0x3e, (byte)0xf5, (byte)0x19, (byte)0xa3, (byte)0xae, 
(byte)0x64, (byte)0x69, (byte)0x14, (byte)0x07, (byte)0x50, (byte)0x97 }); publicKeyEncoding.put("RSA", new byte[] { (byte)0x30, (byte)0x81, (byte)0x9f, (byte)0x30, (byte)0x0d, (byte)0x06, (byte)0x09, (byte)0x2a, (byte)0x86, (byte)0x48, (byte)0x86, (byte)0xf7, (byte)0x0d, (byte)0x01, (byte)0x01, (byte)0x01, (byte)0x05, (byte)0x00, (byte)0x03, (byte)0x81, (byte)0x8d, (byte)0x00, (byte)0x30, (byte)0x81, (byte)0x89, (byte)0x02, (byte)0x81, (byte)0x81, (byte)0x00, (byte)0xb2, (byte)0x4a, (byte)0x9b, (byte)0x5b, (byte)0xba, (byte)0x01, (byte)0xc0, (byte)0xcd, (byte)0x65, (byte)0x09, (byte)0x63, (byte)0x70, (byte)0x0b, (byte)0x5a, (byte)0x1b, (byte)0x92, (byte)0x08, (byte)0xf8, (byte)0x55, (byte)0x5e, (byte)0x7c, (byte)0x1b, (byte)0x50, (byte)0x17, (byte)0xec, (byte)0x44, (byte)0x4c, (byte)0x58, (byte)0x42, (byte)0x2b, (byte)0x41, (byte)0x09, (byte)0x59, (byte)0xf2, (byte)0xe1, (byte)0x5d, (byte)0x43, (byte)0x71, (byte)0x4d, (byte)0x92, (byte)0x03, (byte)0x1d, (byte)0xb6, (byte)0x6c, (byte)0x7f, (byte)0x5d, (byte)0x48, (byte)0xcd, (byte)0x17, (byte)0xec, (byte)0xd7, (byte)0x4c, (byte)0x39, (byte)0xb1, (byte)0x7b, (byte)0xe2, (byte)0xbf, (byte)0x96, (byte)0x77, (byte)0xbe, (byte)0xd0, (byte)0xa0, (byte)0xf0, (byte)0x2d, (byte)0x6b, (byte)0x24, (byte)0xaa, (byte)0x14, (byte)0xba, (byte)0x82, (byte)0x79, (byte)0x10, (byte)0x9b, (byte)0x16, (byte)0x68, (byte)0x47, (byte)0x81, (byte)0x54, (byte)0xa2, (byte)0xfa, (byte)0x91, (byte)0x9e, (byte)0x0a, (byte)0x2a, (byte)0x53, (byte)0xa6, (byte)0xe7, (byte)0x9e, (byte)0x7d, (byte)0x29, (byte)0x33, (byte)0xd8, (byte)0x05, (byte)0xfc, (byte)0x02, (byte)0x3f, (byte)0xbd, (byte)0xc7, (byte)0x6e, (byte)0xed, (byte)0xaa, (byte)0x30, (byte)0x6c, (byte)0x5f, (byte)0x52, (byte)0xed, (byte)0x35, (byte)0x65, (byte)0x4b, (byte)0x0e, (byte)0xc8, (byte)0xa7, (byte)0x12, (byte)0x10, (byte)0x56, (byte)0x37, (byte)0xaf, (byte)0x11, (byte)0xfa, (byte)0x21, (byte)0x0e, (byte)0x99, (byte)0xff, (byte)0xfa, (byte)0x8c, (byte)0x65, (byte)0x8e, 
(byte)0x6d, (byte)0x02, (byte)0x03, (byte)0x01, (byte)0x00, (byte)0x01 }); privateKeyEncoding.put("DSA", new byte[] { (byte)0x30, (byte)0x82, (byte)0x01, (byte)0x4a, (byte)0x02, (byte)0x01, (byte)0x00, (byte)0x30, (byte)0x82, (byte)0x01, (byte)0x2b, (byte)0x06, (byte)0x07, (byte)0x2a, (byte)0x86, (byte)0x48, (byte)0xce, (byte)0x38, (byte)0x04, (byte)0x01, (byte)0x30, (byte)0x82, (byte)0x01, (byte)0x1e, (byte)0x02, (byte)0x81, (byte)0x81, (byte)0x00, (byte)0xca, (byte)0x84, (byte)0x1d, (byte)0xa3, (byte)0xab, (byte)0xb9, (byte)0x98, (byte)0xf4, (byte)0x61, (byte)0x8b, (byte)0x66, (byte)0xdb, (byte)0x4e, (byte)0x3a, (byte)0xb2, (byte)0x11, (byte)0x4e, (byte)0xa9, (byte)0xda, (byte)0x35, (byte)0x91, (byte)0xc9, (byte)0x4e, (byte)0xc3, (byte)0x16, (byte)0xa7, (byte)0xed, (byte)0xb8, (byte)0x8f, (byte)0xd7, (byte)0xea, (byte)0xea, (byte)0xdb, (byte)0x77, (byte)0xe1, (byte)0x77, (byte)0x7a, (byte)0xc9, (byte)0xf3, (byte)0x37, (byte)0x33, (byte)0x01, (byte)0x72, (byte)0xbc, (byte)0xd0, (byte)0x89, (byte)0x9b, (byte)0x18, (byte)0xfd, (byte)0x84, (byte)0xd6, (byte)0xe9, (byte)0xbf, (byte)0x13, (byte)0x35, (byte)0x5e, (byte)0x40, (byte)0xf6, (byte)0x9d, (byte)0xd9, (byte)0x1a, (byte)0xba, (byte)0xa9, (byte)0xc3, (byte)0x8c, (byte)0xe3, (byte)0x95, (byte)0xc8, (byte)0xdf, (byte)0x2e, (byte)0x41, (byte)0xa1, (byte)0xbf, (byte)0xde, (byte)0x5d, (byte)0xad, (byte)0x21, (byte)0xcc, (byte)0x0d, (byte)0x42, (byte)0x56, (byte)0xa0, (byte)0x32, (byte)0xc0, (byte)0x90, (byte)0x73, (byte)0x3e, (byte)0xa4, (byte)0x0e, (byte)0x58, (byte)0xe4, (byte)0x64, (byte)0x00, (byte)0xa3, (byte)0x27, (byte)0x49, (byte)0x56, (byte)0xb2, (byte)0x43, (byte)0xbc, (byte)0x72, (byte)0xa8, (byte)0xd2, (byte)0x26, (byte)0x89, (byte)0x35, (byte)0x37, (byte)0x29, (byte)0x8d, (byte)0x21, (byte)0xb5, (byte)0x8e, (byte)0x59, (byte)0xfa, (byte)0x9e, (byte)0xdf, (byte)0x37, (byte)0x0d, (byte)0x9e, (byte)0xab, (byte)0xfd, (byte)0xbf, (byte)0x1a, (byte)0x9e, (byte)0xf3, (byte)0xe8, (byte)0x3a, (byte)0xfb, 
(byte)0x02, (byte)0x15, (byte)0x00, (byte)0xa2, (byte)0x4e, (byte)0x5d, (byte)0xe3, (byte)0x10, (byte)0x5d, (byte)0xa9, (byte)0x3a, (byte)0x6a, (byte)0x4d, (byte)0x07, (byte)0x3b, (byte)0xab, (byte)0xca, (byte)0x7d, (byte)0x09, (byte)0xd6, (byte)0x06, (byte)0x79, (byte)0x49, (byte)0x02, (byte)0x81, (byte)0x80, (byte)0x5a, (byte)0x91, (byte)0x83, (byte)0x1c, (byte)0x04, (byte)0x33, (byte)0xca, (byte)0x25, (byte)0xb0, (byte)0x68, (byte)0xb3, (byte)0xb3, (byte)0xab, (byte)0x55, (byte)0x29, (byte)0x33, (byte)0x4d, (byte)0xa9, (byte)0x33, (byte)0x39, (byte)0xef, (byte)0x71, (byte)0xca, (byte)0x95, (byte)0xf3, (byte)0xd8, (byte)0x27, (byte)0x56, (byte)0x5f, (byte)0x42, (byte)0xda, (byte)0x36, (byte)0x83, (byte)0xc5, (byte)0xf1, (byte)0x53, (byte)0x62, (byte)0xa5, (byte)0xdc, (byte)0xe6, (byte)0x4e, (byte)0x69, (byte)0x45, (byte)0x71, (byte)0x1a, (byte)0x4a, (byte)0xc3, (byte)0xf4, (byte)0x7f, (byte)0x0a, (byte)0xd1, (byte)0x78, (byte)0xed, (byte)0xbe, (byte)0x6e, (byte)0xa6, (byte)0x36, (byte)0x34, (byte)0x4e, (byte)0xc3, (byte)0x1b, (byte)0x17, (byte)0xaa, (byte)0xa4, (byte)0x76, (byte)0x44, (byte)0x46, (byte)0xaf, (byte)0x26, (byte)0x16, (byte)0x14, (byte)0xfb, (byte)0x9f, (byte)0x5d, (byte)0x08, (byte)0xaf, (byte)0x92, (byte)0xdb, (byte)0xba, (byte)0xd0, (byte)0xcb, (byte)0x8b, (byte)0x1e, (byte)0xc3, (byte)0x8b, (byte)0x36, (byte)0x3b, (byte)0x4c, (byte)0x02, (byte)0xc3, (byte)0x66, (byte)0x28, (byte)0x69, (byte)0xd0, (byte)0x74, (byte)0x4f, (byte)0x1c, (byte)0x4f, (byte)0x97, (byte)0x75, (byte)0x7f, (byte)0x9e, (byte)0x89, (byte)0x80, (byte)0xcf, (byte)0xb2, (byte)0x17, (byte)0xd6, (byte)0x66, (byte)0x91, (byte)0x12, (byte)0x3a, (byte)0xb0, (byte)0x3c, (byte)0x3c, (byte)0xc2, (byte)0x31, (byte)0xd1, (byte)0x31, (byte)0x2a, (byte)0x35, (byte)0xbe, (byte)0x9d, (byte)0x54, (byte)0x71, (byte)0x03, (byte)0xcb, (byte)0xcc, (byte)0x04, (byte)0x16, (byte)0x02, (byte)0x14, (byte)0x52, (byte)0xfb, (byte)0xf9, (byte)0x12, (byte)0x40, (byte)0x05, (byte)0x59, (byte)0x8f, 
(byte)0xde, (byte)0x9d, (byte)0xac, (byte)0xa1, (byte)0xe2, (byte)0xed, (byte)0x56, (byte)0x62, (byte)0x5f, (byte)0x56, (byte)0x67, (byte)0x74 }); publicKeyEncoding.put("DSA", new byte[] { (byte)0x30, (byte)0x82, (byte)0x01, (byte)0xb7, (byte)0x30, (byte)0x82, (byte)0x01, (byte)0x2b, (byte)0x06, (byte)0x07, (byte)0x2a, (byte)0x86, (byte)0x48, (byte)0xce, (byte)0x38, (byte)0x04, (byte)0x01, (byte)0x30, (byte)0x82, (byte)0x01, (byte)0x1e, (byte)0x02, (byte)0x81, (byte)0x81, (byte)0x00, (byte)0xca, (byte)0x84, (byte)0x1d, (byte)0xa3, (byte)0xab, (byte)0xb9, (byte)0x98, (byte)0xf4, (byte)0x61, (byte)0x8b, (byte)0x66, (byte)0xdb, (byte)0x4e, (byte)0x3a, (byte)0xb2, (byte)0x11, (byte)0x4e, (byte)0xa9, (byte)0xda, (byte)0x35, (byte)0x91, (byte)0xc9, (byte)0x4e, (byte)0xc3, (byte)0x16, (byte)0xa7, (byte)0xed, (byte)0xb8, (byte)0x8f, (byte)0xd7, (byte)0xea, (byte)0xea, (byte)0xdb, (byte)0x77, (byte)0xe1, (byte)0x77, (byte)0x7a, (byte)0xc9, (byte)0xf3, (byte)0x37, (byte)0x33, (byte)0x01, (byte)0x72, (byte)0xbc, (byte)0xd0, (byte)0x89, (byte)0x9b, (byte)0x18, (byte)0xfd, (byte)0x84, (byte)0xd6, (byte)0xe9, (byte)0xbf, (byte)0x13, (byte)0x35, (byte)0x5e, (byte)0x40, (byte)0xf6, (byte)0x9d, (byte)0xd9, (byte)0x1a, (byte)0xba, (byte)0xa9, (byte)0xc3, (byte)0x8c, (byte)0xe3, (byte)0x95, (byte)0xc8, (byte)0xdf, (byte)0x2e, (byte)0x41, (byte)0xa1, (byte)0xbf, (byte)0xde, (byte)0x5d, (byte)0xad, (byte)0x21, (byte)0xcc, (byte)0x0d, (byte)0x42, (byte)0x56, (byte)0xa0, (byte)0x32, (byte)0xc0, (byte)0x90, (byte)0x73, (byte)0x3e, (byte)0xa4, (byte)0x0e, (byte)0x58, (byte)0xe4, (byte)0x64, (byte)0x00, (byte)0xa3, (byte)0x27, (byte)0x49, (byte)0x56, (byte)0xb2, (byte)0x43, (byte)0xbc, (byte)0x72, (byte)0xa8, (byte)0xd2, (byte)0x26, (byte)0x89, (byte)0x35, (byte)0x37, (byte)0x29, (byte)0x8d, (byte)0x21, (byte)0xb5, (byte)0x8e, (byte)0x59, (byte)0xfa, (byte)0x9e, (byte)0xdf, (byte)0x37, (byte)0x0d, (byte)0x9e, (byte)0xab, (byte)0xfd, (byte)0xbf, (byte)0x1a, (byte)0x9e, (byte)0xf3, 
(byte)0xe8, (byte)0x3a, (byte)0xfb, (byte)0x02, (byte)0x15, (byte)0x00, (byte)0xa2, (byte)0x4e, (byte)0x5d, (byte)0xe3, (byte)0x10, (byte)0x5d, (byte)0xa9, (byte)0x3a, (byte)0x6a, (byte)0x4d, (byte)0x07, (byte)0x3b, (byte)0xab, (byte)0xca, (byte)0x7d, (byte)0x09, (byte)0xd6, (byte)0x06, (byte)0x79, (byte)0x49, (byte)0x02, (byte)0x81, (byte)0x80, (byte)0x5a, (byte)0x91, (byte)0x83, (byte)0x1c, (byte)0x04, (byte)0x33, (byte)0xca, (byte)0x25, (byte)0xb0, (byte)0x68, (byte)0xb3, (byte)0xb3, (byte)0xab, (byte)0x55, (byte)0x29, (byte)0x33, (byte)0x4d, (byte)0xa9, (byte)0x33, (byte)0x39, (byte)0xef, (byte)0x71, (byte)0xca, (byte)0x95, (byte)0xf3, (byte)0xd8, (byte)0x27, (byte)0x56, (byte)0x5f, (byte)0x42, (byte)0xda, (byte)0x36, (byte)0x83, (byte)0xc5, (byte)0xf1, (byte)0x53, (byte)0x62, (byte)0xa5, (byte)0xdc, (byte)0xe6, (byte)0x4e, (byte)0x69, (byte)0x45, (byte)0x71, (byte)0x1a, (byte)0x4a, (byte)0xc3, (byte)0xf4, (byte)0x7f, (byte)0x0a, (byte)0xd1, (byte)0x78, (byte)0xed, (byte)0xbe, (byte)0x6e, (byte)0xa6, (byte)0x36, (byte)0x34, (byte)0x4e, (byte)0xc3, (byte)0x1b, (byte)0x17, (byte)0xaa, (byte)0xa4, (byte)0x76, (byte)0x44, (byte)0x46, (byte)0xaf, (byte)0x26, (byte)0x16, (byte)0x14, (byte)0xfb, (byte)0x9f, (byte)0x5d, (byte)0x08, (byte)0xaf, (byte)0x92, (byte)0xdb, (byte)0xba, (byte)0xd0, (byte)0xcb, (byte)0x8b, (byte)0x1e, (byte)0xc3, (byte)0x8b, (byte)0x36, (byte)0x3b, (byte)0x4c, (byte)0x02, (byte)0xc3, (byte)0x66, (byte)0x28, (byte)0x69, (byte)0xd0, (byte)0x74, (byte)0x4f, (byte)0x1c, (byte)0x4f, (byte)0x97, (byte)0x75, (byte)0x7f, (byte)0x9e, (byte)0x89, (byte)0x80, (byte)0xcf, (byte)0xb2, (byte)0x17, (byte)0xd6, (byte)0x66, (byte)0x91, (byte)0x12, (byte)0x3a, (byte)0xb0, (byte)0x3c, (byte)0x3c, (byte)0xc2, (byte)0x31, (byte)0xd1, (byte)0x31, (byte)0x2a, (byte)0x35, (byte)0xbe, (byte)0x9d, (byte)0x54, (byte)0x71, (byte)0x03, (byte)0xcb, (byte)0xcc, (byte)0x03, (byte)0x81, (byte)0x85, (byte)0x00, (byte)0x02, (byte)0x81, (byte)0x81, (byte)0x00, (byte)0x95, 
(byte)0xcc, (byte)0x11, (byte)0xd4, (byte)0x53, (byte)0x3d, (byte)0x9c, (byte)0x5c, (byte)0x73, (byte)0xf4, (byte)0x70, (byte)0xf0, (byte)0xe1, (byte)0xac, (byte)0xe3, (byte)0x2c, (byte)0x32, (byte)0x16, (byte)0x1d, (byte)0x34, (byte)0x1a, (byte)0x38, (byte)0x63, (byte)0x69, (byte)0x1a, (byte)0x72, (byte)0x39, (byte)0x4e, (byte)0x41, (byte)0x50, (byte)0xfa, (byte)0xdc, (byte)0x78, (byte)0xa4, (byte)0xb8, (byte)0x17, (byte)0x5a, (byte)0xe4, (byte)0xf9, (byte)0xa2, (byte)0x52, (byte)0x41, (byte)0x85, (byte)0xab, (byte)0x3f, (byte)0xf4, (byte)0x73, (byte)0x2e, (byte)0xae, (byte)0xa9, (byte)0x21, (byte)0x8b, (byte)0x5e, (byte)0x95, (byte)0x15, (byte)0xa2, (byte)0x86, (byte)0x63, (byte)0x0d, (byte)0xba, (byte)0x01, (byte)0xcb, (byte)0xe3, (byte)0x68, (byte)0xc6, (byte)0xaf, (byte)0x56, (byte)0x51, (byte)0x7b, (byte)0xa8, (byte)0x85, (byte)0x3f, (byte)0x01, (byte)0x80, (byte)0x8b, (byte)0x1f, (byte)0xb4, (byte)0x4c, (byte)0x93, (byte)0x6b, (byte)0x42, (byte)0xa6, (byte)0xbd, (byte)0x67, (byte)0x2a, (byte)0x95, (byte)0x05, (byte)0xff, (byte)0x03, (byte)0x2e, (byte)0x6f, (byte)0xd4, (byte)0xd3, (byte)0xf0, (byte)0x17, (byte)0xde, (byte)0xcb, (byte)0x7d, (byte)0xd9, (byte)0x42, (byte)0x4d, (byte)0x97, (byte)0x2c, (byte)0x53, (byte)0xe6, (byte)0x39, (byte)0x61, (byte)0xd2, (byte)0x69, (byte)0xd1, (byte)0x1c, (byte)0x9a, (byte)0x8b, (byte)0x5b, (byte)0x9c, (byte)0xfa, (byte)0xfa, (byte)0x50, (byte)0x50, (byte)0xbb, (byte)0xe4, (byte)0x2e, (byte)0x83, (byte)0x06, (byte)0x08, (byte)0x96, (byte)0x2a, (byte)0x68 }); privateKeyEncoding.put("DH", new byte[] { (byte) 0x30, (byte) 0xffffff81, (byte) 0xffffffe1, (byte) 0x2, (byte) 0x1, (byte) 0x0, (byte) 0x30, (byte) 0xffffff81, (byte) 0xffffff97, (byte) 0x6, (byte) 0x9, (byte) 0x2a, (byte) 0xffffff86, (byte) 0x48, (byte) 0xffffff86, (byte) 0xfffffff7, (byte) 0xd, (byte) 0x1, (byte) 0x3, (byte) 0x1, (byte) 0x30, (byte) 0xffffff81, (byte) 0xffffff89, (byte) 0x2, (byte) 0x41, (byte) 0x0, (byte) 0xfffffff0, (byte) 0xffffffaa, (byte) 
0x22, (byte) 0x5a, (byte) 0x29, (byte) 0xffffffb2, (byte) 0x3f, (byte) 0xffffffc9, (byte) 0xb, (byte) 0xffffff87, (byte) 0x5d, (byte) 0xffffff91, (byte) 0x51, (byte) 0x1, (byte) 0xffffffa4, (byte) 0xffffffb9, (byte) 0x4e, (byte) 0x1e, (byte) 0xffffff85, (byte) 0xfffffffc, (byte) 0xffffffa6, (byte) 0x5a, (byte) 0xffffff96, (byte) 0xffffffb1, (byte) 0xffffffcb, (byte) 0xffffff81, (byte) 0xffffffa3, (byte) 0x6e, (byte) 0xffffff90, (byte) 0xffffffbd, (byte) 0xffffffa2, (byte) 0xe, (byte) 0xffffffb4, (byte) 0xffffffba, (byte) 0x2c, (byte) 0x45, (byte) 0x9, (byte) 0x1c, (byte) 0xffffff98, (byte) 0x39, (byte) 0x26, (byte) 0x24, (byte) 0x40, (byte) 0xffffff80, (byte) 0xffffffce, (byte) 0x15, (byte) 0xffffff8b, (byte) 0xffffffe1, (byte) 0x67, (byte) 0x48, (byte) 0xfffffff3, (byte) 0x70, (byte) 0xffffff98, (byte) 0xffffffca, (byte) 0xffffffa7, (byte) 0x71, (byte) 0x33, (byte) 0xffffffb6, (byte) 0x4, (byte) 0x13, (byte) 0xffffffe5, (byte) 0x61, (byte) 0x3c, (byte) 0x1f, (byte) 0x2, (byte) 0x40, (byte) 0x1e, (byte) 0xffffffd8, (byte) 0x6f, (byte) 0xffffffce, (byte) 0x23, (byte) 0x71, (byte) 0x6a, (byte) 0x2a, (byte) 0xffffffa3, (byte) 0x4d, (byte) 0x62, (byte) 0xffffffe9, (byte) 0x5f, (byte) 0x17, (byte) 0xffffffa8, (byte) 0xffffffe8, (byte) 0xffffffaa, (byte) 0xffffff8a, (byte) 0xffffff95, (byte) 0x26, (byte) 0x7c, (byte) 0x38, (byte) 0xffffffa9, (byte) 0x2b, (byte) 0x48, (byte) 0x5a, (byte) 0x16, (byte) 0x19, (byte) 0xfffffffa, (byte) 0xffffff83, (byte) 0xffffffb8, (byte) 0x76, (byte) 0xffffffaf, (byte) 0xffffffb8, (byte) 0x62, (byte) 0x72, (byte) 0x45, (byte) 0xffffff9f, (byte) 0xffffff95, (byte) 0x1e, (byte) 0x62, (byte) 0x36, (byte) 0xffffff97, (byte) 0xffffffbf, (byte) 0xffffffab, (byte) 0x20, (byte) 0xffffffb0, (byte) 0x61, (byte) 0xffffffc5, (byte) 0x21, (byte) 0xffffff9e, (byte) 0xffffffe4, (byte) 0xffffffde, (byte) 0xffffff91, (byte) 0x1c, (byte) 0x6a, (byte) 0x7, (byte) 0x48, (byte) 0x77, (byte) 0x70, (byte) 0x1d, (byte) 0xffffffff, (byte) 0x58, (byte) 0x23, (byte) 
0x2, (byte) 0x2, (byte) 0x1, (byte) 0xffffffff, (byte) 0x4, (byte) 0x42, (byte) 0x2, (byte) 0x40, (byte) 0x69, (byte) 0xffffff86, (byte) 0x48, (byte) 0x57, (byte) 0xffffffbf, (byte) 0xffffffde, (byte) 0x8, (byte) 0xffffffc6, (byte) 0x24, (byte) 0x6d, (byte) 0xf, (byte) 0x20, (byte) 0xffffff94, (byte) 0x4a, (byte) 0x22, (byte) 0x6e, (byte) 0x24, (byte) 0x60, (byte) 0xffffffd9, (byte) 0xffffffa9, (byte) 0xffffffbd, (byte) 0x1e, (byte) 0x64, (byte) 0xffffff89, (byte) 0xffffff83, (byte) 0x3c, (byte) 0xffffffe7, (byte) 0x70, (byte) 0x24, (byte) 0xffffffe1, (byte) 0xffffff8f, (byte) 0x3c, (byte) 0x4d, (byte) 0x39, (byte) 0x5f, (byte) 0xffffff9e, (byte) 0xffffff93, (byte) 0x13, (byte) 0xffffff86, (byte) 0xffffffe9, (byte) 0xffffff80, (byte) 0xf, (byte) 0xffffffc4, (byte) 0x41, (byte) 0xffffff8b, (byte) 0xfffffff4, (byte) 0xffffff8b, (byte) 0x65, (byte) 0xffffffa4, (byte) 0x1b, (byte) 0xd, (byte) 0x4, (byte) 0x48, (byte) 0x40, (byte) 0xffffffd6, (byte) 0xffffffa2, (byte) 0x0, (byte) 0xffffff85, (byte) 0xffffffe9, (byte) 0xffffffc4, (byte) 0x77, (byte) 0xffffffb2, (byte) 0x25, (byte) 0xffffffd8 }); publicKeyEncoding.put("DH", new byte[] { (byte) 0x30, (byte) 0xffffff81, (byte) 0xffffffe0, (byte) 0x30, (byte) 0xffffff81, (byte) 0xffffff97, (byte) 0x6, (byte) 0x9, (byte) 0x2a, (byte) 0xffffff86, (byte) 0x48, (byte) 0xffffff86, (byte) 0xfffffff7, (byte) 0xd, (byte) 0x1, (byte) 0x3, (byte) 0x1, (byte) 0x30, (byte) 0xffffff81, (byte) 0xffffff89, (byte) 0x2, (byte) 0x41, (byte) 0x0, (byte) 0xfffffff0, (byte) 0xffffffaa, (byte) 0x22, (byte) 0x5a, (byte) 0x29, (byte) 0xffffffb2, (byte) 0x3f, (byte) 0xffffffc9, (byte) 0xb, (byte) 0xffffff87, (byte) 0x5d, (byte) 0xffffff91, (byte) 0x51, (byte) 0x1, (byte) 0xffffffa4, (byte) 0xffffffb9, (byte) 0x4e, (byte) 0x1e, (byte) 0xffffff85, (byte) 0xfffffffc, (byte) 0xffffffa6, (byte) 0x5a, (byte) 0xffffff96, (byte) 0xffffffb1, (byte) 0xffffffcb, (byte) 0xffffff81, (byte) 0xffffffa3, (byte) 0x6e, (byte) 0xffffff90, (byte) 0xffffffbd, (byte) 
0xffffffa2, (byte) 0xe, (byte) 0xffffffb4, (byte) 0xffffffba, (byte) 0x2c, (byte) 0x45, (byte) 0x9, (byte) 0x1c, (byte) 0xffffff98, (byte) 0x39, (byte) 0x26, (byte) 0x24, (byte) 0x40, (byte) 0xffffff80, (byte) 0xffffffce, (byte) 0x15, (byte) 0xffffff8b, (byte) 0xffffffe1, (byte) 0x67, (byte) 0x48, (byte) 0xfffffff3, (byte) 0x70, (byte) 0xffffff98, (byte) 0xffffffca, (byte) 0xffffffa7, (byte) 0x71, (byte) 0x33, (byte) 0xffffffb6, (byte) 0x4, (byte) 0x13, (byte) 0xffffffe5, (byte) 0x61, (byte) 0x3c, (byte) 0x1f, (byte) 0x2, (byte) 0x40, (byte) 0x1e, (byte) 0xffffffd8, (byte) 0x6f, (byte) 0xffffffce, (byte) 0x23, (byte) 0x71, (byte) 0x6a, (byte) 0x2a, (byte) 0xffffffa3, (byte) 0x4d, (byte) 0x62, (byte) 0xffffffe9, (byte) 0x5f, (byte) 0x17, (byte) 0xffffffa8, (byte) 0xffffffe8, (byte) 0xffffffaa, (byte) 0xffffff8a, (byte) 0xffffff95, (byte) 0x26, (byte) 0x7c, (byte) 0x38, (byte) 0xffffffa9, (byte) 0x2b, (byte) 0x48, (byte) 0x5a, (byte) 0x16, (byte) 0x19, (byte) 0xfffffffa, (byte) 0xffffff83, (byte) 0xffffffb8, (byte) 0x76, (byte) 0xffffffaf, (byte) 0xffffffb8, (byte) 0x62, (byte) 0x72, (byte) 0x45, (byte) 0xffffff9f, (byte) 0xffffff95, (byte) 0x1e, (byte) 0x62, (byte) 0x36, (byte) 0xffffff97, (byte) 0xffffffbf, (byte) 0xffffffab, (byte) 0x20, (byte) 0xffffffb0, (byte) 0x61, (byte) 0xffffffc5, (byte) 0x21, (byte) 0xffffff9e, (byte) 0xffffffe4, (byte) 0xffffffde, (byte) 0xffffff91, (byte) 0x1c, (byte) 0x6a, (byte) 0x7, (byte) 0x48, (byte) 0x77, (byte) 0x70, (byte) 0x1d, (byte) 0xffffffff, (byte) 0x58, (byte) 0x23, (byte) 0x2, (byte) 0x2, (byte) 0x1, (byte) 0xffffffff, (byte) 0x3, (byte) 0x44, (byte) 0x0, (byte) 0x2, (byte) 0x41, (byte) 0x0, (byte) 0xffffff9d, (byte) 0xffffffc4, (byte) 0xffffffcd, (byte) 0x10, (byte) 0xffffffdf, (byte) 0x66, (byte) 0xffffff92, (byte) 0xffffffe1, (byte) 0x33, (byte) 0xffffffb1, (byte) 0xffffffc9, (byte) 0xffffff9f, (byte) 0xffffffb7, (byte) 0xffffffdd, (byte) 0xffffff84, (byte) 0x4b, (byte) 0xffffffe5, (byte) 0xffffff86, (byte) 0xfffffff0, 
(byte) 0x53, (byte) 0x2a, (byte) 0xffffffd5, (byte) 0xffffffc6, (byte) 0x15, (byte) 0xffffff94, (byte) 0xffffffae, (byte) 0x13, (byte) 0x7b, (byte) 0xffffff9d, (byte) 0x37, (byte) 0xffffff8b, (byte) 0xffffffc6, (byte) 0xffffffc6, (byte) 0x78, (byte) 0xffffff9c, (byte) 0x60, (byte) 0xffffff8a, (byte) 0x6f, (byte) 0x35, (byte) 0x39, (byte) 0xffffffe0, (byte) 0x78, (byte) 0x33, (byte) 0x60, (byte) 0xffffff89, (byte) 0x30, (byte) 0x61, (byte) 0xffffff84, (byte) 0xffffff8a, (byte) 0xffffffbc, (byte) 0xffffff80, (byte) 0x6c, (byte) 0x1c, (byte) 0x55, (byte) 0xffffff96, (byte) 0x50, (byte) 0xffffffb1, (byte) 0xffffff96, (byte) 0x5, (byte) 0x21, (byte) 0x65, (byte) 0x55, (byte) 0xffffffbb, (byte) 0xffffffa4 }); } public TestKeyPair(String algorithmName) throws NoSuchAlgorithmException { this.algorithmName = algorithmName; if (!privateKeyEncoding.containsKey(this.algorithmName)) { throw new NoSuchAlgorithmException("Encoded form not available for " + this.algorithmName); } kf = KeyFactory.getInstance(this.algorithmName); } public PublicKey getPublic() throws InvalidKeySpecException { return kf.generatePublic( new X509EncodedKeySpec( (byte[])publicKeyEncoding.get(algorithmName))); } public PrivateKey getPrivate() throws InvalidKeySpecException { return kf.generatePrivate( new PKCS8EncodedKeySpec( (byte[])privateKeyEncoding.get(algorithmName))); } }
oracle/coherence
36,596
prj/coherence-core-components/src/main/java/com/tangosol/coherence/component/net/management/model/localModel/WrapperModel.java
/* * Copyright (c) 2000, 2023, Oracle and/or its affiliates. * * Licensed under the Universal Permissive License v 1.0 as shown at * https://oss.oracle.com/licenses/upl. */ // ---- class: com.tangosol.coherence.component.net.management.model.localModel.WrapperModel package com.tangosol.coherence.component.net.management.model.localModel; import com.tangosol.coherence.component.net.management.model.RemoteModel; import com.tangosol.net.management.annotation.Description; import com.tangosol.util.Base; import com.tangosol.util.ClassHelper; import com.tangosol.util.ExternalizableHelper; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.WeakHashMap; import javax.management.Attribute; import javax.management.DynamicMBean; import javax.management.MBeanAttributeInfo; import javax.management.MBeanInfo; import javax.management.MBeanOperationInfo; import javax.management.MXBean; import javax.management.NotCompliantMBeanException; import javax.management.NotificationBroadcaster; import javax.management.NotificationEmitter; /** * Model components implement the JMX-managed functionality of the * corresponding MBeans without being dependent on any JMX classes and could be * used both in-process and out-of-process (relative to an MBeanServer). * * The LocalModel components operate in two distinct modes: live and snapshot. * In the live mode all model methods call corresponding methods on managed * objects. The snapshot mode uses the _SnapshotMap to keep the attribute * values. * * Every time a remote invocation is used by the RemoteModel to do a * setAttribute or invoke call, the snapshot model is refreshed. * * A WrapperModel can be used to manage any object that can be classified as * either a Dynamic or Standard MBean (as defined by the JMX specification). 
*/ @SuppressWarnings({"deprecation", "rawtypes", "unused", "unchecked", "ConstantConditions", "DuplicatedCode", "ForLoopReplaceableByForEach", "IfCanBeSwitch", "RedundantArrayCreation", "RedundantSuppression", "SameParameterValue", "TryFinallyCanBeTryWithResources", "TryWithIdenticalCatches", "UnnecessaryBoxing", "UnnecessaryUnboxing", "UnusedAssignment"}) public class WrapperModel extends com.tangosol.coherence.component.net.management.model.LocalModel implements javax.management.NotificationListener { // ---- Fields declarations ---- /** * Property _NotificationInfo * */ private javax.management.MBeanNotificationInfo[] __m__NotificationInfo; /** * Property _Notify * * flag that is true if the model supports notifications. */ private transient boolean __m__Notify; /** * Property Dynamic * * Determines if an MBean represented by this Model is a Dynamic MBean. */ private transient boolean __m_Dynamic; /** * Property MBean * * The MBean wrapped by this WrapperModel. */ private transient Object __m_MBean; /** * Property MBeanInfo * * The MBeanInfo that describes the management interface of the wrapped * MBean managed by this WrapperModel. */ private transient javax.management.MBeanInfo __m_MBeanInfo; /** * Property MBeanInfoCache * * A cache of MBeanInfo objects keyed by MBean classes. 
*/ private static transient java.util.Map __s_MBeanInfoCache; // Default constructor public WrapperModel() { this(null, null, true); } // Initializing constructor public WrapperModel(String sName, com.tangosol.coherence.Component compParent, boolean fInit) { super(sName, compParent, false); if (fInit) { __init(); } } // Main initializer public void __init() { // private initialization __initPrivate(); // state initialization: public and protected properties try { set_SnapshotMap(new java.util.HashMap()); } catch (java.lang.Exception e) { // re-throw as a runtime exception throw new com.tangosol.util.WrapperException(e); } // signal the end of the initialization set_Constructed(true); } // Private initializer protected void __initPrivate() { super.__initPrivate(); } //++ getter for static property _Instance /** * Getter for property _Instance.<p> * Auto generated */ public static com.tangosol.coherence.Component get_Instance() { return new com.tangosol.coherence.component.net.management.model.localModel.WrapperModel(); } //++ getter for static property _CLASS /** * Getter for property _CLASS.<p> * Property with auto-generated accessor that returns the Class object for a * given component. */ public static Class get_CLASS() { Class clz; try { clz = Class.forName("com.tangosol.coherence/component/net/management/model/localModel/WrapperModel".replace('/', '.')); } catch (ClassNotFoundException e) { throw new NoClassDefFoundError(e.getMessage()); } return clz; } //++ getter for autogen property _Module /** * This is an auto-generated method that returns the global [design time] * parent component. * * Note: the class generator will ignore any custom implementation for this * behavior. */ private com.tangosol.coherence.Component get_Module() { return this; } /** * Add this Model as a notification listener to the corresponding (Standard * or Dynamic) MBean. 
*/ protected void _addNotificationListener() { // import javax.management.NotificationBroadcaster; try { Object oBean = getMBean(); if (oBean instanceof NotificationBroadcaster) { ((NotificationBroadcaster) oBean).addNotificationListener(this, null, null); } } catch (Throwable e) { _trace("Failed to add notification listener on MBean " + get_ModelName() + "\n" + getStackTrace(e), 3); } } // Declared at the super level /** * Add the Local Notification Listener on a Local Model to a Local Model * */ public com.tangosol.coherence.component.net.management.listenerHolder.LocalHolder _addNotificationListener(javax.management.NotificationListener listener, javax.management.NotificationFilter filter, Object handback) { if (!is_SubscribedTo()) { _addNotificationListener(); } return super._addNotificationListener(listener, filter, handback); } // Declared at the super level /** * Subscribe to the local model from a remote model using the Notification * Listener Reference */ public void _addRemoteNotificationListener(com.tangosol.coherence.component.net.management.listenerHolder.RemoteHolder holder, com.tangosol.coherence.component.net.management.Connector connector) { if (!is_SubscribedTo()) { _addNotificationListener(); } super._addRemoteNotificationListener(holder, connector); } /** * Remove this Model as a notification listener from the corresponding * (Standard and Dynamic) MBean. 
*/ protected void _removeNotificationListener() { // import javax.management.NotificationBroadcaster; // import javax.management.NotificationEmitter; Object oBean = getMBean(); try { if (oBean instanceof NotificationEmitter) { ((NotificationEmitter) oBean).removeNotificationListener(this, null, null); } else { if (oBean instanceof NotificationBroadcaster) { ((NotificationBroadcaster) oBean).removeNotificationListener(this); } } } catch (Throwable e) // ListenerNotFoundException { _trace("Failed to remove notification listener on MBean " + get_ModelName() + "\n" + getStackTrace(e), 3); } } // Declared at the super level /** * Remove all the notifications for the specified listener from the model. * * @return a Set<LocalHolder> of holders associated with the specified * listener */ public java.util.Set _removeNotificationListener(javax.management.NotificationListener listener) { try { return super._removeNotificationListener(listener); } finally { if (!is_SubscribedTo()) { _removeNotificationListener(); } } } // Declared at the super level /** * Remove the Notification Listener from the model. * * @return the LocalHolder representing the listener/filter/handback tuple * or null if the registration did not exist */ public com.tangosol.coherence.component.net.management.listenerHolder.LocalHolder _removeNotificationListener(javax.management.NotificationListener listener, javax.management.NotificationFilter filter, Object handback) { try { return super._removeNotificationListener(listener, filter, handback); } finally { if (!is_SubscribedTo()) { _removeNotificationListener(); } } } // Declared at the super level /** * Remove all notifications from the model. */ public void _removeNotificationListeners() { if (is_SubscribedTo()) { _removeNotificationListener(); } super._removeNotificationListeners(); } // Declared at the super level /** * Unsubscribe remote notifications represented by the specified holder id * for the given member. 
*/ public void _removeRemoteNotificationListener(int nMember, long lHolderId) { super._removeRemoteNotificationListener(nMember, lHolderId); if (!is_SubscribedTo()) { _removeNotificationListener(); } } /** * Check whether or not the specified exception could be ignored. The caller * would be responsible for returning an appropriate default value. */ protected void checkIgnoreException(Throwable e, String sMsg) { // import com.tangosol.util.Base; if (e instanceof UnsupportedOperationException || e.getCause() instanceof UnsupportedOperationException) { // Some of the base JVM Management (MemoryPool) Objects throw UOE // or RuntimeMBeanException when the option is "disabled"; ignore... return; } throw Base.ensureRuntimeException(e, sMsg); } /** * Search for the interface with the given name on the specified class. * This method does not load the class with the given name. * Check if the interface follows the standard MBean or MXBean standards: * 1) A standard MBean is defined by a Java interface called SomethingMBean * that is located in the same package as a Java class called Something that * implements that interface; * 2) An MXBean is defined by a Java interface called SomethingMXBean and a * Java class that implements that interface, but could be located in * another package; * 3) The annotation @MXBean can be also used to annotate the Java interface * in (2), instead of requiring the interface's name to be followed by the * MXBean suffix. 
* Refer: http://docs.oracle.com/javase/tutorial/jmx/mbeans * * @return the interface class if found; null otherwise */ protected static Class findCompliantInterface(Class clz, String sName) { // import javax.management.MXBean; _assert(clz != null); _assert(sName != null); if (clz.isInterface()) { // Standard MBean or MXBean or @MXBean Annotation if (clz.getName().equals(sName+"MBean") || clz.getName().endsWith("MXBean") || clz.isAnnotationPresent(MXBean.class)) { return clz; } } Class[] aclz = clz.getInterfaces(); for (int i = 0, c = aclz == null ? 0 : aclz.length; i < c; i++) { Class clzIntf = findCompliantInterface(aclz[i], sName); if (clzIntf != null) { return clzIntf; } } return null; } // Declared at the super level /** * Getter for property _MBeanComponent.<p> * The name of the corresponding MBean component. If not overridden at the * specific Model subcomponent, the naming convention is: * * sMBeanName = "Component.Manageable.ModelAdapter." + * (get_Name() - "Model") + "MBean"; * The name of the corresponding MBean component. If not overriden at the * specific Model subcomponent, the naming convention is: * * sMBeanName = "Component.Manageable.ModelAdapter." + * get_Name().replace("Model", "MBean"); */ public String get_MBeanComponent() { // Theoretically we could improve the generic algorithm to determine this name. return is_Notify() ? "Component.Manageable.ModelAdapter.WrapperMBean.WrapperEmitterMBean" : "Component.Manageable.ModelAdapter.WrapperMBean"; } // Accessor for the property "_NotificationInfo" /** * Getter for property _NotificationInfo.<p> */ public javax.management.MBeanNotificationInfo[] get_NotificationInfo() { return __m__NotificationInfo; } /** * Subclassing support. 
*/ protected Object getAttribute(javax.management.MBeanAttributeInfo attrInfo) { // import com.tangosol.util.Base; // import com.tangosol.util.ClassHelper; // import javax.management.DynamicMBean; Object oBean = getMBean(); String sAttr = attrInfo.getName(); try { if (oBean instanceof DynamicMBean) { return ((DynamicMBean) oBean).getAttribute(sAttr); } else { String sMethodName = attrInfo.isIs() ? "is" + sAttr : "get" + sAttr; return invoke(sMethodName, ClassHelper.VOID); } } catch (Exception e) { throw Base.ensureRuntimeException(e); } } // Accessor for the property "MBean" /** * Getter for property MBean.<p> * The MBean wrapped by this WrapperModel. */ public Object getMBean() { return __m_MBean; } // Accessor for the property "MBeanInfo" /** * Getter for property MBeanInfo.<p> * The MBeanInfo that describes the management interface of the wrapped * MBean managed by this WrapperModel. */ public javax.management.MBeanInfo getMBeanInfo() { return __m_MBeanInfo; } // Accessor for the property "MBeanInfo" /** * Return an MBeanInfo for the given MBean object. 
* * @throws NotCompliantMBeanException if the given object is not a compliant * Dynamic or Standard MBean */ protected synchronized javax.management.MBeanInfo getMBeanInfo(Object oBean) throws javax.management.NotCompliantMBeanException { // import java.util.Map; // import java.util.WeakHashMap; // import javax.management.DynamicMBean; // import javax.management.MBeanInfo; // import javax.management.NotificationEmitter; _assert(oBean != null); Class clzBean = oBean.getClass(); Map mapCache = getMBeanInfoCache(); if (oBean instanceof NotificationEmitter) { set_NotificationInfo(((NotificationEmitter) oBean) .getNotificationInfo()); set_Notify(true); } if (mapCache == null) { setMBeanInfoCache(mapCache = new WeakHashMap()); } MBeanInfo info = (MBeanInfo) mapCache.get(clzBean); if (info == null) { if (oBean instanceof DynamicMBean) { return ((DynamicMBean) oBean).getMBeanInfo(); } else { info = introspectMBean(clzBean); } mapCache.put(clzBean, info); } return info; } // Accessor for the property "MBeanInfoCache" /** * Getter for property MBeanInfoCache.<p> * A cache of MBeanInfo objects keyed by MBean classes. */ protected static java.util.Map getMBeanInfoCache() { return __s_MBeanInfoCache; } /** * Return the MBean interface class that defines the management interface * for the given Standard MBean or MXBean class. 
* * @return the MBean interface class * @throw NotCompliantMBeanException if the given class is not a compliant * Standard MBean or MXBean class */ protected static Class getMBeanInterface(Class clzBean) throws javax.management.NotCompliantMBeanException { // import java.lang.reflect.Modifier; // import javax.management.NotCompliantMBeanException; _assert(clzBean != null); if (clzBean.isInterface() || clzBean.isPrimitive()) { throw new NotCompliantMBeanException("Illegal MBean type: " + clzBean); } for (Class clz = clzBean; clz != null; clz = clz.getSuperclass()) { Class clzIntf = findCompliantInterface(clz, clz.getName()); if (clzIntf != null && Modifier.isPublic(clzIntf.getModifiers())) { return clzIntf; } } throw new NotCompliantMBeanException("Illegal MBean: " + clzBean + " neither follows the Standard MBean conventions nor the MXBean conventions"); } // From interface: javax.management.NotificationListener public void handleNotification(javax.management.Notification notification, Object handback) { _handleNotification(notification); } /** * Return the MBeanInfo that describes the management interface exposed by * the given Standard MBean interface. * * @throw NotCompliantMBeanException if the given class is not a compliant * Standard MBean class */ protected javax.management.MBeanInfo introspectMBean(Class clzBean) throws javax.management.NotCompliantMBeanException { // import com.tangosol.net.management.annotation.Description; // import com.tangosol.util.Base; // import java.lang.reflect.Method; // import java.util.ArrayList; // import java.util.HashMap; // import java.util.List; // import java.util.Map; // import javax.management.MBeanInfo; // import javax.management.MBeanAttributeInfo; // import javax.management.MBeanOperationInfo; // import javax.management.NotCompliantMBeanException; Class clzBeanIntf = getMBeanInterface(clzBean); Method[] aMethod = clzBeanIntf.getMethods(); int cMethod = aMethod == null ? 
0 : aMethod.length; Map mapAttrInfo = new HashMap(cMethod); List listOpInfo = new ArrayList(cMethod); Description descrMBean = (Description) clzBeanIntf.getAnnotation(Description.class); String sAttrDesc = "MBean attribute exposed for management."; String sOpDesc = "MBean operation exposed for management."; String sDesc = "MBean(Class=" + clzBean.getName() + ", Interface=" + clzBeanIntf.getName() + ")"; // If there is a description annotation for the MBean interface // we replace the standard description. if (descrMBean != null) { sDesc = descrMBean.value(); } // find all attributes and operations on the standard mbean interface for (int i = 0; i < cMethod; i++) { Method method = aMethod[i]; String sName = method.getName(); Class[] aclz = method.getParameterTypes(); Class clz = method.getReturnType(); int cParam = aclz == null ? 0 : aclz.length; String sAttrName = null; Class clzAttr = null; boolean fReadable = false; boolean fWritable = false; boolean fIs = false; Description descrMethod = (Description) method.getAnnotation(Description.class); // filter outlier operations: // X(), XY(), XYZ(), is(), and methods with more than 1 parameter if ((sName.length() >= 4 || sName.startsWith("is")) && !sName.equals("is") && cParam <= 1) { // process getters if (cParam == 0 && clz != Void.TYPE) { if (sName.startsWith("get")) { sAttrName = sName.substring(3); } else if (sName.startsWith("is") && (clz == Boolean.TYPE || clz == Boolean.class)) { sAttrName = sName.substring(2); fIs = true; } clzAttr = clz; fReadable = true; } // process setters else if (cParam == 1 && clz == Void.TYPE) { if (sName.startsWith("set")) { sAttrName = sName.substring(3); clzAttr = aclz[0]; fWritable = true; } } } // handle operations if (sAttrName == null) { if (descrMethod != null) { sOpDesc = descrMethod.value(); } listOpInfo.add(new MBeanOperationInfo(sOpDesc, method)); } // handle attributes else { MBeanAttributeInfo attrInfo = (MBeanAttributeInfo) mapAttrInfo.get(sAttrName); if (attrInfo != 
null) { if (clzAttr.getName().equals(attrInfo.getType())) { if (fReadable && attrInfo.isReadable() && fIs != attrInfo.isIs()) { throw Base.ensureRuntimeException( new NotCompliantMBeanException("Getter is" + sAttrName + " cannot co-exist with getter get" + sAttrName)); } else { fReadable = fReadable || attrInfo.isReadable(); fWritable = fWritable || attrInfo.isWritable(); fIs = fIs || attrInfo.isIs(); } } else { if (fWritable == attrInfo.isWritable()) { throw Base.ensureRuntimeException( new NotCompliantMBeanException( "Type mismatch between parameters of set" + sAttrName + " methods.")); } else { throw Base.ensureRuntimeException( new NotCompliantMBeanException( "Type mismatch between parameters of get or is" + sAttrName + " and set" + sAttrName + " methods.")); } } } if (descrMethod != null) { sAttrDesc = descrMethod.value(); } attrInfo = new MBeanAttributeInfo(sAttrName, clzAttr.getName(), sAttrDesc, fReadable, fWritable, fIs); mapAttrInfo.put(sAttrName, attrInfo); } } // assemble the final MBeanInfo MBeanAttributeInfo[] aAttrInfo = (MBeanAttributeInfo[]) mapAttrInfo.values().toArray(new MBeanAttributeInfo[mapAttrInfo.size()]); MBeanOperationInfo[] aOpInfo = (MBeanOperationInfo[]) listOpInfo.toArray(new MBeanOperationInfo[listOpInfo.size()]); return new MBeanInfo(clzBean.getName(), sDesc, aAttrInfo, null, aOpInfo, get_NotificationInfo()); } // Declared at the super level /** * Invoke the method with the specified name and parameters on the MBean * represented by this model. */ public Object invoke(int nOp, String sName, Object[] aoParam) throws java.lang.IllegalAccessException, java.lang.NoSuchMethodException, java.lang.reflect.InvocationTargetException, javax.management.MBeanException { return invoke(nOp, sName, aoParam, null); } // Declared at the super level /** * Invoke the method with the specified name and parameters on the MBean * represented by this model. 
*/ public Object invoke(int nOp, String sName, Object[] aoParam, String[] asSignature) throws java.lang.IllegalAccessException, java.lang.NoSuchMethodException, java.lang.reflect.InvocationTargetException, javax.management.MBeanException { // import Component.Net.Management.Model.RemoteModel; // import javax.management.DynamicMBean; // import javax.management.Attribute; if (isDynamic()) { try { DynamicMBean oBean = (DynamicMBean) getMBean(); switch (nOp) { case RemoteModel.OP_GET: return oBean.getAttribute(sName); case RemoteModel.OP_SET: oBean.setAttribute(new Attribute(sName, aoParam[0])); return null; case RemoteModel.OP_INVOKE: return oBean.invoke(sName, aoParam, asSignature); default: throw new IllegalStateException(); } } catch (Exception e) { checkIgnoreException(e, null); return null; } } else { return super.invoke(nOp, sName, aoParam, asSignature); } } // Declared at the super level /** * Invoke the method with the specified name on the wrapped MBean with the * specified parameters. */ public Object invoke(String sMethod, Object[] aoParam) throws java.lang.IllegalAccessException, java.lang.NoSuchMethodException, java.lang.reflect.InvocationTargetException { // import com.tangosol.util.ClassHelper; Object oBean = getMBean(); _assert(oBean != null, "Managed object was not set"); return ClassHelper.invoke(oBean, sMethod, aoParam); } // Accessor for the property "_Notify" /** * Getter for property _Notify.<p> * flag that is true if the model supports notifications. */ public boolean is_Notify() { return __m__Notify; } // Accessor for the property "Dynamic" /** * True if the underlying model implements javax.management.DynamicMBean. */ public boolean isDynamic() { return __m_Dynamic; } // Declared at the super level /** * Must be supplemented at each specific Model implementation. 
*/ public void readExternal(java.io.DataInput in) throws java.io.IOException { // import com.tangosol.util.ExternalizableHelper; // import java.util.Map; super.readExternal(in); Map mapSnapshot = get_SnapshotMap(); setDynamic(in.readBoolean()); set_Notify(in.readBoolean()); int cAttr = ExternalizableHelper.readInt(in); for (int i = 0; i < cAttr; i++) { String sAttrName = null; Object oAttrValue; try { sAttrName = ExternalizableHelper.readSafeUTF(in); oAttrValue = ExternalizableHelper.readObject(in); } catch (Exception e) { _trace("The MBean attribute \"" + sAttrName + "\" could not be retrieved; " + "all remaining attributes will be ignored:\n" + getStackTrace(e), 2); break; } mapSnapshot.put(sAttrName, oAttrValue); } } /** * Must be supplemented at each specific Model implementation. */ protected void readExternalImpl(java.io.DataInput in) throws java.io.IOException { } // Accessor for the property "_NotificationInfo" /** * Setter for property _NotificationInfo.<p> */ public void set_NotificationInfo(javax.management.MBeanNotificationInfo[] p_NotificationInfo) { __m__NotificationInfo = p_NotificationInfo; } // Accessor for the property "_Notify" /** * Setter for property _Notify.<p> * flag that is true if the model supports notifications. */ public void set_Notify(boolean notifyFlag) { __m__Notify = notifyFlag; } // Accessor for the property "Dynamic" /** * Setter for property Dynamic.<p> * Determines if an MBean represented by this Model is a Dynamic MBean. 
*/ protected void setDynamic(boolean fDynamic) { __m_Dynamic = fDynamic; } // Accessor for the property "MBean" /** * Set the underlying Bean and the associated MBeanInfo */ public void setMBean(Object oBean) { // import com.tangosol.util.Base; // import javax.management.NotCompliantMBeanException; // import javax.management.DynamicMBean; _assert(oBean != null, "Managed object cannot be null"); setDynamic(oBean instanceof DynamicMBean); try { setMBeanInfo(getMBeanInfo(oBean)); } catch (NotCompliantMBeanException e) { throw Base.ensureRuntimeException(e); } __m_MBean = (oBean); } // Accessor for the property "MBeanInfo" /** * Setter for property MBeanInfo.<p> * The MBeanInfo that describes the management interface of the wrapped * MBean managed by this WrapperModel. */ public void setMBeanInfo(javax.management.MBeanInfo infoBean) { __m_MBeanInfo = infoBean; } // Accessor for the property "MBeanInfoCache" /** * Setter for property MBeanInfoCache.<p> * A cache of MBeanInfo objects keyed by MBean classes. */ protected static void setMBeanInfoCache(java.util.Map mapCache) { __s_MBeanInfoCache = mapCache; } // Declared at the super level /** * Must be supplemented at each specific Model implementation. */ public void writeExternal(java.io.DataOutput out) throws java.io.IOException { // import com.tangosol.util.ExternalizableHelper; // import javax.management.MBeanAttributeInfo; // import javax.management.MBeanInfo; super.writeExternal(out); MBeanInfo info = getMBeanInfo(); _assert(info != null); out.writeBoolean(isDynamic()); out.writeBoolean(is_Notify()); MBeanAttributeInfo[] aAttrInfo = info.getAttributes(); // prepare the attributes int cAttrs = aAttrInfo == null ? 
0 : aAttrInfo.length; String[] asName = new String[cAttrs]; Object[] aoValue = new Object[cAttrs]; int ix = 0; for (int i = 0; i < cAttrs; i++) { MBeanAttributeInfo attrinfo = aAttrInfo[i]; if (attrinfo.isReadable()) { try { asName [ix] = attrinfo.getName(); aoValue[ix] = getAttribute(attrinfo); ix++; } catch (Exception e) { _trace("The value of the attribute \"" + attrinfo.getName() + "\" for MBean \"" + get_ModelName() + "\" could not be retrieved and is ignored; " + getStackTrace(e), 2); continue; } } } ExternalizableHelper.writeInt(out, cAttrs = ix); for (int i = 0; i < cAttrs; i++) { try { ExternalizableHelper.writeSafeUTF(out, asName[i]); ExternalizableHelper.writeObject (out, aoValue[i]); } catch (Exception e) { _trace("The MBean attribute \"" + asName[i] + "\" could not be serialized; " + "all remaining attributes will be ignored:\n" + getStackTrace(e), 2); break; } } } }
apache/hbase
36,757
hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.security.visibility; import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_FAMILY; import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_NAME; import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABEL_QUALIFIER; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.IOException; import java.security.PrivilegedExceptionAction; import java.util.ArrayList; import java.util.Collection; import java.util.List; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Increment; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.RowMutations; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.client.TableDescriptorBuilder; import org.apache.hadoop.hbase.client.security.SecurityCapability; import org.apache.hadoop.hbase.regionserver.BloomType; import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.regionserver.HStore; import org.apache.hadoop.hbase.regionserver.HStoreFile; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread; import org.junit.After; import org.junit.AfterClass; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TestName; import org.apache.hbase.thirdparty.com.google.protobuf.ByteString; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult; import org.apache.hadoop.hbase.shaded.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse; /** * Base test class for visibility labels basic features */ public abstract class TestVisibilityLabels { public static final String TOPSECRET = "topsecret"; public static final String PUBLIC = "public"; public static final String PRIVATE = "private"; public static final String CONFIDENTIAL = "confidential"; public static final String 
SECRET = "secret"; public static final String COPYRIGHT = "\u00A9ABC"; public static final String ACCENT = "\u0941"; public static final String UNICODE_VIS_TAG = COPYRIGHT + "\"" + ACCENT + "\\" + SECRET + "\"" + "\u0027&\\"; public static final String UC1 = "\u0027\"\u002b"; public static final String UC2 = "\u002d\u003f"; public static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); public static final byte[] row1 = Bytes.toBytes("row1"); public static final byte[] row2 = Bytes.toBytes("row2"); public static final byte[] row3 = Bytes.toBytes("row3"); public static final byte[] row4 = Bytes.toBytes("row4"); public final static byte[] fam = Bytes.toBytes("info"); public final static byte[] qual = Bytes.toBytes("qual"); public final static byte[] value = Bytes.toBytes("value"); public static Configuration conf; private volatile boolean killedRS = false; @Rule public final TestName TEST_NAME = new TestName(); public static User SUPERUSER, USER1; @AfterClass public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } @After public void tearDown() throws Exception { killedRS = false; } @Test public void testSecurityCapabilities() throws Exception { List<SecurityCapability> capabilities = TEST_UTIL.getConnection().getAdmin().getSecurityCapabilities(); assertTrue("CELL_VISIBILITY capability is missing", capabilities.contains(SecurityCapability.CELL_VISIBILITY)); } @Test public void testSimpleVisibilityLabels() throws Exception { TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); try (Table table = createTableAndWriteDataWithLabels(tableName, SECRET + "|" + CONFIDENTIAL, PRIVATE + "|" + CONFIDENTIAL)) { Scan s = new Scan(); s.setAuthorizations(new Authorizations(SECRET, CONFIDENTIAL, PRIVATE)); ResultScanner scanner = table.getScanner(s); Result[] next = scanner.next(3); assertTrue(next.length == 2); CellScanner cellScanner = next[0].cellScanner(); cellScanner.advance(); Cell current = cellScanner.current(); 
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), current.getRowLength(), row1, 0, row1.length)); cellScanner = next[1].cellScanner(); cellScanner.advance(); current = cellScanner.current(); assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), current.getRowLength(), row2, 0, row2.length)); } } @Test public void testSimpleVisibilityLabelsWithUniCodeCharacters() throws Exception { TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); try (Table table = createTableAndWriteDataWithLabels(tableName, SECRET + "|" + CellVisibility.quote(COPYRIGHT), "(" + CellVisibility.quote(COPYRIGHT) + "&" + CellVisibility.quote(ACCENT) + ")|" + CONFIDENTIAL, CellVisibility.quote(UNICODE_VIS_TAG) + "&" + SECRET)) { Scan s = new Scan(); s.setAuthorizations( new Authorizations(SECRET, CONFIDENTIAL, PRIVATE, COPYRIGHT, ACCENT, UNICODE_VIS_TAG)); ResultScanner scanner = table.getScanner(s); Result[] next = scanner.next(3); assertTrue(next.length == 3); CellScanner cellScanner = next[0].cellScanner(); cellScanner.advance(); Cell current = cellScanner.current(); assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), current.getRowLength(), row1, 0, row1.length)); cellScanner = next[1].cellScanner(); cellScanner.advance(); current = cellScanner.current(); assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), current.getRowLength(), row2, 0, row2.length)); cellScanner = next[2].cellScanner(); cellScanner.advance(); current = cellScanner.current(); assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), current.getRowLength(), row3, 0, row3.length)); } } @Test public void testAuthorizationsWithSpecialUnicodeCharacters() throws Exception { TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); try (Table table = createTableAndWriteDataWithLabels(tableName, CellVisibility.quote(UC1) + "|" + CellVisibility.quote(UC2), CellVisibility.quote(UC1), CellVisibility.quote(UNICODE_VIS_TAG))) { 
Scan s = new Scan(); s.setAuthorizations(new Authorizations(UC1, UC2, ACCENT, UNICODE_VIS_TAG)); ResultScanner scanner = table.getScanner(s); Result[] next = scanner.next(3); assertTrue(next.length == 3); CellScanner cellScanner = next[0].cellScanner(); cellScanner.advance(); Cell current = cellScanner.current(); assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), current.getRowLength(), row1, 0, row1.length)); cellScanner = next[1].cellScanner(); cellScanner.advance(); current = cellScanner.current(); assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), current.getRowLength(), row2, 0, row2.length)); cellScanner = next[2].cellScanner(); cellScanner.advance(); current = cellScanner.current(); assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), current.getRowLength(), row3, 0, row3.length)); } } @Test public void testVisibilityLabelsWithComplexLabels() throws Exception { TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); try (Table table = createTableAndWriteDataWithLabels(tableName, "(" + SECRET + "|" + CONFIDENTIAL + ")" + "&" + "!" 
+ TOPSECRET, "(" + PRIVATE + "&" + CONFIDENTIAL + "&" + SECRET + ")", "(" + PRIVATE + "&" + CONFIDENTIAL + "&" + SECRET + ")", "(" + PRIVATE + "&" + CONFIDENTIAL + "&" + SECRET + ")")) { Scan s = new Scan(); s.setAuthorizations(new Authorizations(TOPSECRET, CONFIDENTIAL, PRIVATE, PUBLIC, SECRET)); ResultScanner scanner = table.getScanner(s); Result[] next = scanner.next(4); assertEquals(3, next.length); CellScanner cellScanner = next[0].cellScanner(); cellScanner.advance(); Cell current = cellScanner.current(); assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), current.getRowLength(), row2, 0, row2.length)); cellScanner = next[1].cellScanner(); cellScanner.advance(); current = cellScanner.current(); assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), current.getRowLength(), row3, 0, row3.length)); cellScanner = next[2].cellScanner(); cellScanner.advance(); current = cellScanner.current(); assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), current.getRowLength(), row4, 0, row4.length)); } } @Test public void testVisibilityLabelsThatDoesNotPassTheCriteria() throws Exception { TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); try (Table table = createTableAndWriteDataWithLabels(tableName, "(" + SECRET + "|" + CONFIDENTIAL + ")", PRIVATE)) { Scan s = new Scan(); s.setAuthorizations(new Authorizations(PUBLIC)); ResultScanner scanner = table.getScanner(s); Result[] next = scanner.next(3); assertTrue(next.length == 0); } } @Test public void testVisibilityLabelsInPutsThatDoesNotMatchAnyDefinedLabels() throws Exception { TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); try { createTableAndWriteDataWithLabels(tableName, "SAMPLE_LABEL", "TEST"); fail("Should have failed with failed sanity check exception"); } catch (Exception e) { } } @Test public void testVisibilityLabelsInScanThatDoesNotMatchAnyDefinedLabels() throws Exception { TableName tableName = 
TableName.valueOf(TEST_NAME.getMethodName()); try (Table table = createTableAndWriteDataWithLabels(tableName, "(" + SECRET + "|" + CONFIDENTIAL + ")", PRIVATE)) { Scan s = new Scan(); s.setAuthorizations(new Authorizations("SAMPLE")); ResultScanner scanner = table.getScanner(s); Result[] next = scanner.next(3); assertTrue(next.length == 0); } } @Test public void testVisibilityLabelsWithGet() throws Exception { TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); try (Table table = createTableAndWriteDataWithLabels(tableName, SECRET + "&" + CONFIDENTIAL + "&!" + PRIVATE, SECRET + "&" + CONFIDENTIAL + "&" + PRIVATE)) { Get get = new Get(row1); get.setAuthorizations(new Authorizations(SECRET, CONFIDENTIAL)); Result result = table.get(get); assertTrue(!result.isEmpty()); Cell cell = result.getColumnLatestCell(fam, qual); assertTrue(Bytes.equals(value, 0, value.length, cell.getValueArray(), cell.getValueOffset(), cell.getValueLength())); } } @Test public void testVisibilityLabelsOnKillingOfRSContainingLabelsTable() throws Exception { List<RegionServerThread> regionServerThreads = TEST_UTIL.getHBaseCluster().getRegionServerThreads(); int liveRS = 0; for (RegionServerThread rsThreads : regionServerThreads) { if (!rsThreads.getRegionServer().isAborted()) { liveRS++; } } if (liveRS == 1) { TEST_UTIL.getHBaseCluster().startRegionServer(); } Thread t1 = new Thread() { @Override public void run() { List<RegionServerThread> regionServerThreads = TEST_UTIL.getHBaseCluster().getRegionServerThreads(); for (RegionServerThread rsThread : regionServerThreads) { List<HRegion> onlineRegions = rsThread.getRegionServer().getRegions(LABELS_TABLE_NAME); if (onlineRegions.size() > 0) { rsThread.getRegionServer().abort("Aborting "); killedRS = true; break; } } } }; t1.start(); final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); Thread t = new Thread() { @Override public void run() { try { while (!killedRS) { Thread.sleep(1); } 
createTableAndWriteDataWithLabels(tableName, "(" + SECRET + "|" + CONFIDENTIAL + ")", PRIVATE); } catch (Exception e) { } } }; t.start(); regionServerThreads = TEST_UTIL.getHBaseCluster().getRegionServerThreads(); while (!killedRS) { Thread.sleep(10); } regionServerThreads = TEST_UTIL.getHBaseCluster().getRegionServerThreads(); for (RegionServerThread rsThread : regionServerThreads) { while (true) { if (!rsThread.getRegionServer().isAborted()) { List<HRegion> onlineRegions = rsThread.getRegionServer().getRegions(LABELS_TABLE_NAME); if (onlineRegions.size() > 0) { break; } else { Thread.sleep(10); } } else { break; } } } TEST_UTIL.waitTableEnabled(LABELS_TABLE_NAME.getName(), 50000); t.join(); try (Table table = TEST_UTIL.getConnection().getTable(tableName)) { Scan s = new Scan(); s.setAuthorizations(new Authorizations(SECRET)); ResultScanner scanner = table.getScanner(s); Result[] next = scanner.next(3); assertTrue(next.length == 1); } } @Test public void testVisibilityLabelsOnRSRestart() throws Exception { final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); List<RegionServerThread> regionServerThreads = TEST_UTIL.getHBaseCluster().getRegionServerThreads(); for (RegionServerThread rsThread : regionServerThreads) { rsThread.getRegionServer().abort("Aborting "); } // Start one new RS RegionServerThread rs = TEST_UTIL.getHBaseCluster().startRegionServer(); waitForLabelsRegionAvailability(rs.getRegionServer()); try (Table table = createTableAndWriteDataWithLabels(tableName, "(" + SECRET + "|" + CONFIDENTIAL + ")", PRIVATE)) { Scan s = new Scan(); s.setAuthorizations(new Authorizations(SECRET)); ResultScanner scanner = table.getScanner(s); Result[] next = scanner.next(3); assertTrue(next.length == 1); } } protected void waitForLabelsRegionAvailability(HRegionServer regionServer) { while (!regionServer.isOnline()) { try { Thread.sleep(10); } catch (InterruptedException e) { } } while (regionServer.getRegions(LABELS_TABLE_NAME).isEmpty()) { try { 
Thread.sleep(10); } catch (InterruptedException e) { } } } @Test public void testVisibilityLabelsInGetThatDoesNotMatchAnyDefinedLabels() throws Exception { TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); try (Table table = createTableAndWriteDataWithLabels(tableName, "(" + SECRET + "|" + CONFIDENTIAL + ")", PRIVATE)) { Get get = new Get(row1); get.setAuthorizations(new Authorizations("SAMPLE")); Result result = table.get(get); assertTrue(result.isEmpty()); } } @Test public void testSetAndGetUserAuths() throws Throwable { final String user = "user1"; PrivilegedExceptionAction<Void> action = new PrivilegedExceptionAction<Void>() { @Override public Void run() throws Exception { String[] auths = { SECRET, CONFIDENTIAL }; try (Connection conn = ConnectionFactory.createConnection(conf)) { VisibilityClient.setAuths(conn, auths, user); } catch (Throwable e) { throw new IOException(e); } return null; } }; SUPERUSER.runAs(action); try (Table ht = TEST_UTIL.getConnection().getTable(LABELS_TABLE_NAME)) { Scan scan = new Scan(); scan.setAuthorizations(new Authorizations(VisibilityUtils.SYSTEM_LABEL)); ResultScanner scanner = ht.getScanner(scan); Result result = null; List<Result> results = new ArrayList<>(); while ((result = scanner.next()) != null) { results.add(result); } List<String> auths = extractAuths(user, results); assertTrue(auths.contains(SECRET)); assertTrue(auths.contains(CONFIDENTIAL)); assertEquals(2, auths.size()); } action = new PrivilegedExceptionAction<Void>() { @Override public Void run() throws Exception { GetAuthsResponse authsResponse = null; try (Connection conn = ConnectionFactory.createConnection(conf)) { authsResponse = VisibilityClient.getAuths(conn, user); } catch (Throwable e) { throw new IOException(e); } List<String> authsList = new ArrayList<>(authsResponse.getAuthList().size()); for (ByteString authBS : authsResponse.getAuthList()) { authsList.add(Bytes.toString(authBS.toByteArray())); } assertEquals(2, authsList.size()); 
assertTrue(authsList.contains(SECRET)); assertTrue(authsList.contains(CONFIDENTIAL)); return null; } }; SUPERUSER.runAs(action); // Try doing setAuths once again and there should not be any duplicates action = new PrivilegedExceptionAction<Void>() { @Override public Void run() throws Exception { String[] auths1 = { SECRET, CONFIDENTIAL }; GetAuthsResponse authsResponse = null; try (Connection conn = ConnectionFactory.createConnection(conf)) { VisibilityClient.setAuths(conn, auths1, user); try { authsResponse = VisibilityClient.getAuths(conn, user); } catch (Throwable e) { throw new IOException(e); } } catch (Throwable e) { } List<String> authsList = new ArrayList<>(authsResponse.getAuthList().size()); for (ByteString authBS : authsResponse.getAuthList()) { authsList.add(Bytes.toString(authBS.toByteArray())); } assertEquals(2, authsList.size()); assertTrue(authsList.contains(SECRET)); assertTrue(authsList.contains(CONFIDENTIAL)); return null; } }; SUPERUSER.runAs(action); } protected List<String> extractAuths(String user, List<Result> results) { List<String> auths = new ArrayList<>(); for (Result result : results) { Cell labelCell = result.getColumnLatestCell(LABELS_TABLE_FAMILY, LABEL_QUALIFIER); Cell userAuthCell = result.getColumnLatestCell(LABELS_TABLE_FAMILY, Bytes.toBytes(user)); if (userAuthCell != null) { auths.add(Bytes.toString(labelCell.getValueArray(), labelCell.getValueOffset(), labelCell.getValueLength())); } } return auths; } @Test public void testClearUserAuths() throws Throwable { PrivilegedExceptionAction<Void> action = new PrivilegedExceptionAction<Void>() { @Override public Void run() throws Exception { String[] auths = { SECRET, CONFIDENTIAL, PRIVATE }; String user = "testUser"; try (Connection conn = ConnectionFactory.createConnection(conf)) { VisibilityClient.setAuths(conn, auths, user); } catch (Throwable e) { throw new IOException(e); } // Removing the auths for SECRET and CONFIDENTIAL for the user. // Passing a non existing auth also. 
auths = new String[] { SECRET, PUBLIC, CONFIDENTIAL }; VisibilityLabelsResponse response = null; try (Connection conn = ConnectionFactory.createConnection(conf)) { response = VisibilityClient.clearAuths(conn, auths, user); } catch (Throwable e) { fail("Should not have failed"); } List<RegionActionResult> resultList = response.getResultList(); assertEquals(3, resultList.size()); assertTrue(resultList.get(0).getException().getValue().isEmpty()); assertEquals("org.apache.hadoop.hbase.DoNotRetryIOException", resultList.get(1).getException().getName()); assertTrue(Bytes.toString(resultList.get(1).getException().getValue().toByteArray()) .contains("org.apache.hadoop.hbase.security.visibility.InvalidLabelException: " + "Label 'public' is not set for the user testUser")); assertTrue(resultList.get(2).getException().getValue().isEmpty()); try (Connection connection = ConnectionFactory.createConnection(conf); Table ht = connection.getTable(LABELS_TABLE_NAME)) { ResultScanner scanner = ht.getScanner(new Scan()); Result result = null; List<Result> results = new ArrayList<>(); while ((result = scanner.next()) != null) { results.add(result); } List<String> curAuths = extractAuths(user, results); assertTrue(curAuths.contains(PRIVATE)); assertEquals(1, curAuths.size()); } GetAuthsResponse authsResponse = null; try (Connection conn = ConnectionFactory.createConnection(conf)) { authsResponse = VisibilityClient.getAuths(conn, user); } catch (Throwable e) { throw new IOException(e); } List<String> authsList = new ArrayList<>(authsResponse.getAuthList().size()); for (ByteString authBS : authsResponse.getAuthList()) { authsList.add(Bytes.toString(authBS.toByteArray())); } assertEquals(1, authsList.size()); assertTrue(authsList.contains(PRIVATE)); return null; } }; SUPERUSER.runAs(action); } @Test public void testLabelsWithCheckAndPut() throws Throwable { TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); try (Table table = TEST_UTIL.createTable(tableName, fam)) { byte[] 
row1 = Bytes.toBytes("row1"); Put put = new Put(row1); put.addColumn(fam, qual, HConstants.LATEST_TIMESTAMP, value); put.setCellVisibility(new CellVisibility(SECRET + " & " + CONFIDENTIAL)); table.checkAndMutate(row1, fam).qualifier(qual).ifNotExists().thenPut(put); byte[] row2 = Bytes.toBytes("row2"); put = new Put(row2); put.addColumn(fam, qual, HConstants.LATEST_TIMESTAMP, value); put.setCellVisibility(new CellVisibility(SECRET)); table.checkAndMutate(row2, fam).qualifier(qual).ifNotExists().thenPut(put); Scan scan = new Scan(); scan.setAuthorizations(new Authorizations(SECRET)); ResultScanner scanner = table.getScanner(scan); Result result = scanner.next(); assertTrue(!result.isEmpty()); assertTrue(Bytes.equals(row2, result.getRow())); result = scanner.next(); assertNull(result); } } @Test public void testLabelsWithIncrement() throws Throwable { TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); try (Table table = TEST_UTIL.createTable(tableName, fam)) { byte[] row1 = Bytes.toBytes("row1"); byte[] val = Bytes.toBytes(1L); Put put = new Put(row1); put.addColumn(fam, qual, HConstants.LATEST_TIMESTAMP, val); put.setCellVisibility(new CellVisibility(SECRET + " & " + CONFIDENTIAL)); table.put(put); Get get = new Get(row1); get.setAuthorizations(new Authorizations(SECRET)); Result result = table.get(get); assertTrue(result.isEmpty()); table.incrementColumnValue(row1, fam, qual, 2L); result = table.get(get); assertTrue(result.isEmpty()); Increment increment = new Increment(row1); increment.addColumn(fam, qual, 2L); increment.setCellVisibility(new CellVisibility(SECRET)); table.increment(increment); result = table.get(get); assertTrue(!result.isEmpty()); } } @Test public void testLabelsWithAppend() throws Throwable { TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); try (Table table = TEST_UTIL.createTable(tableName, fam)) { byte[] row1 = Bytes.toBytes("row1"); byte[] val = Bytes.toBytes("a"); Put put = new Put(row1); 
put.addColumn(fam, qual, HConstants.LATEST_TIMESTAMP, val); put.setCellVisibility(new CellVisibility(SECRET + " & " + CONFIDENTIAL)); table.put(put); Get get = new Get(row1); get.setAuthorizations(new Authorizations(SECRET)); Result result = table.get(get); assertTrue(result.isEmpty()); Append append = new Append(row1); append.addColumn(fam, qual, Bytes.toBytes("b")); table.append(append); result = table.get(get); assertTrue(result.isEmpty()); append = new Append(row1); append.addColumn(fam, qual, Bytes.toBytes("c")); append.setCellVisibility(new CellVisibility(SECRET)); table.append(append); result = table.get(get); assertTrue(!result.isEmpty()); } } @Test public void testUserShouldNotDoDDLOpOnLabelsTable() throws Exception { Admin admin = TEST_UTIL.getAdmin(); try { admin.disableTable(LABELS_TABLE_NAME); fail("Lables table should not get disabled by user."); } catch (Exception e) { } try { admin.deleteTable(LABELS_TABLE_NAME); fail("Lables table should not get disabled by user."); } catch (Exception e) { } try { ColumnFamilyDescriptor columnFamilyDescriptor = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("testFamily")).build(); admin.addColumnFamily(LABELS_TABLE_NAME, columnFamilyDescriptor); fail("Lables table should not get altered by user."); } catch (Exception e) { } try { admin.deleteColumnFamily(LABELS_TABLE_NAME, VisibilityConstants.LABELS_TABLE_FAMILY); fail("Lables table should not get altered by user."); } catch (Exception e) { } try { ColumnFamilyDescriptor familyDescriptor = ColumnFamilyDescriptorBuilder.newBuilder(VisibilityConstants.LABELS_TABLE_FAMILY) .setBloomFilterType(BloomType.ROWCOL).build(); admin.modifyColumnFamily(LABELS_TABLE_NAME, familyDescriptor); fail("Lables table should not get altered by user."); } catch (Exception e) { } try { TableDescriptorBuilder tableDescriptorBuilder = TableDescriptorBuilder.newBuilder(LABELS_TABLE_NAME); ColumnFamilyDescriptor columnFamilyDescriptor = 
ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("f1")).build(); tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptor); columnFamilyDescriptor = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("f2")).build(); tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptor); admin.modifyTable(tableDescriptorBuilder.build()); fail("Lables table should not get altered by user."); } catch (Exception e) { } } @Test public void testMultipleVersions() throws Exception { final byte[] r1 = Bytes.toBytes("row1"); final byte[] r2 = Bytes.toBytes("row2"); final byte[] v1 = Bytes.toBytes("100"); final byte[] v2 = Bytes.toBytes("101"); final byte[] fam2 = Bytes.toBytes("info2"); final byte[] qual2 = Bytes.toBytes("qual2"); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); // Default max versions is 1. TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(tableName) .setColumnFamily(ColumnFamilyDescriptorBuilder.of(fam)) .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(fam2).setMaxVersions(5).build()) .build(); TEST_UTIL.getAdmin().createTable(tableDescriptor); try (Table table = TEST_UTIL.getConnection().getTable(tableName)) { Put put = new Put(r1); put.addColumn(fam, qual, 3L, v1); put.addColumn(fam, qual2, 3L, v1); put.addColumn(fam2, qual, 3L, v1); put.addColumn(fam2, qual2, 3L, v1); put.setCellVisibility(new CellVisibility(SECRET)); table.put(put); put = new Put(r1); put.addColumn(fam, qual, 4L, v2); put.addColumn(fam, qual2, 4L, v2); put.addColumn(fam2, qual, 4L, v2); put.addColumn(fam2, qual2, 4L, v2); put.setCellVisibility(new CellVisibility(PRIVATE)); table.put(put); put = new Put(r2); put.addColumn(fam, qual, 3L, v1); put.addColumn(fam, qual2, 3L, v1); put.addColumn(fam2, qual, 3L, v1); put.addColumn(fam2, qual2, 3L, v1); put.setCellVisibility(new CellVisibility(SECRET)); table.put(put); put = new Put(r2); put.addColumn(fam, qual, 4L, v2); put.addColumn(fam, qual2, 4L, v2); put.addColumn(fam2, qual, 4L, v2); 
put.addColumn(fam2, qual2, 4L, v2); put.setCellVisibility(new CellVisibility(SECRET)); table.put(put); Scan s = new Scan(); s.readVersions(1); s.setAuthorizations(new Authorizations(SECRET)); ResultScanner scanner = table.getScanner(s); Result result = scanner.next(); assertTrue(Bytes.equals(r1, result.getRow())); // for cf 'fam' max versions in HCD is 1. So the old version cells, which are having matching // CellVisibility with Authorizations, should not get considered in the label evaluation at // all. assertNull(result.getColumnLatestCell(fam, qual)); assertNull(result.getColumnLatestCell(fam, qual2)); // for cf 'fam2' max versions in HCD is > 1. So we can consider the old version cells, which // are having matching CellVisibility with Authorizations, in the label evaluation. It can // just skip those recent versions for which visibility is not there as per the new version's // CellVisibility. The old versions which are having visibility can be send back Cell cell = result.getColumnLatestCell(fam2, qual); assertNotNull(cell); assertTrue(Bytes.equals(v1, 0, v1.length, cell.getValueArray(), cell.getValueOffset(), cell.getValueLength())); cell = result.getColumnLatestCell(fam2, qual2); assertNotNull(cell); assertTrue(Bytes.equals(v1, 0, v1.length, cell.getValueArray(), cell.getValueOffset(), cell.getValueLength())); result = scanner.next(); assertTrue(Bytes.equals(r2, result.getRow())); cell = result.getColumnLatestCell(fam, qual); assertNotNull(cell); assertTrue(Bytes.equals(v2, 0, v2.length, cell.getValueArray(), cell.getValueOffset(), cell.getValueLength())); cell = result.getColumnLatestCell(fam, qual2); assertNotNull(cell); assertTrue(Bytes.equals(v2, 0, v2.length, cell.getValueArray(), cell.getValueOffset(), cell.getValueLength())); cell = result.getColumnLatestCell(fam2, qual); assertNotNull(cell); assertTrue(Bytes.equals(v2, 0, v2.length, cell.getValueArray(), cell.getValueOffset(), cell.getValueLength())); cell = result.getColumnLatestCell(fam2, qual2); 
assertNotNull(cell); assertTrue(Bytes.equals(v2, 0, v2.length, cell.getValueArray(), cell.getValueOffset(), cell.getValueLength())); } } @Test public void testMutateRow() throws Exception { final byte[] qual2 = Bytes.toBytes("qual2"); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(tableName) .setColumnFamily(ColumnFamilyDescriptorBuilder.of(fam)).build(); TEST_UTIL.getAdmin().createTable(tableDescriptor); try (Table table = TEST_UTIL.getConnection().getTable(tableName)) { Put p1 = new Put(row1); p1.addColumn(fam, qual, value); p1.setCellVisibility(new CellVisibility(CONFIDENTIAL)); Put p2 = new Put(row1); p2.addColumn(fam, qual2, value); p2.setCellVisibility(new CellVisibility(SECRET)); RowMutations rm = new RowMutations(row1); rm.add(p1); rm.add(p2); table.mutateRow(rm); Get get = new Get(row1); get.setAuthorizations(new Authorizations(CONFIDENTIAL)); Result result = table.get(get); assertTrue(result.containsColumn(fam, qual)); assertFalse(result.containsColumn(fam, qual2)); get.setAuthorizations(new Authorizations(SECRET)); result = table.get(get); assertFalse(result.containsColumn(fam, qual)); assertTrue(result.containsColumn(fam, qual2)); } } @Test public void testFlushedFileWithVisibilityTags() throws Exception { final byte[] qual2 = Bytes.toBytes("qual2"); TableName tableName = TableName.valueOf(TEST_NAME.getMethodName()); TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(tableName) .setColumnFamily(ColumnFamilyDescriptorBuilder.of(fam)).build(); TEST_UTIL.getAdmin().createTable(tableDescriptor); try (Table table = TEST_UTIL.getConnection().getTable(tableName)) { Put p1 = new Put(row1); p1.addColumn(fam, qual, value); p1.setCellVisibility(new CellVisibility(CONFIDENTIAL)); Put p2 = new Put(row1); p2.addColumn(fam, qual2, value); p2.setCellVisibility(new CellVisibility(SECRET)); RowMutations rm = new RowMutations(row1); rm.add(p1); rm.add(p2); 
table.mutateRow(rm); } TEST_UTIL.getAdmin().flush(tableName); List<HRegion> regions = TEST_UTIL.getHBaseCluster().getRegions(tableName); HStore store = regions.get(0).getStore(fam); Collection<HStoreFile> storefiles = store.getStorefiles(); assertTrue(storefiles.size() > 0); for (HStoreFile storeFile : storefiles) { assertTrue(storeFile.getReader().getHFileReader().getFileContext().isIncludesTags()); } } static Table createTableAndWriteDataWithLabels(TableName tableName, String... labelExps) throws Exception { List<Put> puts = new ArrayList<>(labelExps.length); for (int i = 0; i < labelExps.length; i++) { Put put = new Put(Bytes.toBytes("row" + (i + 1))); put.addColumn(fam, qual, HConstants.LATEST_TIMESTAMP, value); put.setCellVisibility(new CellVisibility(labelExps[i])); puts.add(put); } Table table = TEST_UTIL.createTable(tableName, fam); table.put(puts); return table; } public static void addLabels() throws Exception { PrivilegedExceptionAction<VisibilityLabelsResponse> action = new PrivilegedExceptionAction<VisibilityLabelsResponse>() { @Override public VisibilityLabelsResponse run() throws Exception { String[] labels = { SECRET, TOPSECRET, CONFIDENTIAL, PUBLIC, PRIVATE, COPYRIGHT, ACCENT, UNICODE_VIS_TAG, UC1, UC2 }; try (Connection conn = ConnectionFactory.createConnection(conf)) { VisibilityClient.addLabels(conn, labels); } catch (Throwable t) { throw new IOException(t); } return null; } }; SUPERUSER.runAs(action); } }
googleapis/google-cloud-java
36,668
java-cloudbuild/proto-google-cloud-build-v1/src/main/java/com/google/cloudbuild/v1/UpdateWorkerPoolRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/devtools/cloudbuild/v1/cloudbuild.proto // Protobuf Java Version: 3.25.8 package com.google.cloudbuild.v1; /** * * * <pre> * Request to update a `WorkerPool`. * </pre> * * Protobuf type {@code google.devtools.cloudbuild.v1.UpdateWorkerPoolRequest} */ public final class UpdateWorkerPoolRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.devtools.cloudbuild.v1.UpdateWorkerPoolRequest) UpdateWorkerPoolRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateWorkerPoolRequest.newBuilder() to construct. 
private UpdateWorkerPoolRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateWorkerPoolRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateWorkerPoolRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloudbuild.v1.Cloudbuild .internal_static_google_devtools_cloudbuild_v1_UpdateWorkerPoolRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloudbuild.v1.Cloudbuild .internal_static_google_devtools_cloudbuild_v1_UpdateWorkerPoolRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloudbuild.v1.UpdateWorkerPoolRequest.class, com.google.cloudbuild.v1.UpdateWorkerPoolRequest.Builder.class); } private int bitField0_; public static final int WORKER_POOL_FIELD_NUMBER = 1; private com.google.cloudbuild.v1.WorkerPool workerPool_; /** * * * <pre> * Required. The `WorkerPool` to update. * * The `name` field is used to identify the `WorkerPool` to update. * Format: `projects/{project}/locations/{location}/workerPools/{workerPool}`. * </pre> * * <code> * .google.devtools.cloudbuild.v1.WorkerPool worker_pool = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the workerPool field is set. */ @java.lang.Override public boolean hasWorkerPool() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The `WorkerPool` to update. * * The `name` field is used to identify the `WorkerPool` to update. * Format: `projects/{project}/locations/{location}/workerPools/{workerPool}`. * </pre> * * <code> * .google.devtools.cloudbuild.v1.WorkerPool worker_pool = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The workerPool. 
*/ @java.lang.Override public com.google.cloudbuild.v1.WorkerPool getWorkerPool() { return workerPool_ == null ? com.google.cloudbuild.v1.WorkerPool.getDefaultInstance() : workerPool_; } /** * * * <pre> * Required. The `WorkerPool` to update. * * The `name` field is used to identify the `WorkerPool` to update. * Format: `projects/{project}/locations/{location}/workerPools/{workerPool}`. * </pre> * * <code> * .google.devtools.cloudbuild.v1.WorkerPool worker_pool = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloudbuild.v1.WorkerPoolOrBuilder getWorkerPoolOrBuilder() { return workerPool_ == null ? com.google.cloudbuild.v1.WorkerPool.getDefaultInstance() : workerPool_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * A mask specifying which fields in `worker_pool` to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * A mask specifying which fields in `worker_pool` to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * A mask specifying which fields in `worker_pool` to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } public static final int VALIDATE_ONLY_FIELD_NUMBER = 4; private boolean validateOnly_ = false; /** * * * <pre> * If set, validate the request and preview the response, but do not actually * post it. * </pre> * * <code>bool validate_only = 4;</code> * * @return The validateOnly. */ @java.lang.Override public boolean getValidateOnly() { return validateOnly_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getWorkerPool()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } if (validateOnly_ != false) { output.writeBool(4, validateOnly_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getWorkerPool()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } if (validateOnly_ != false) { size += com.google.protobuf.CodedOutputStream.computeBoolSize(4, validateOnly_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloudbuild.v1.UpdateWorkerPoolRequest)) { return super.equals(obj); } com.google.cloudbuild.v1.UpdateWorkerPoolRequest other = (com.google.cloudbuild.v1.UpdateWorkerPoolRequest) obj; if (hasWorkerPool() != 
other.hasWorkerPool()) return false; if (hasWorkerPool()) { if (!getWorkerPool().equals(other.getWorkerPool())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (getValidateOnly() != other.getValidateOnly()) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasWorkerPool()) { hash = (37 * hash) + WORKER_POOL_FIELD_NUMBER; hash = (53 * hash) + getWorkerPool().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (37 * hash) + VALIDATE_ONLY_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getValidateOnly()); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloudbuild.v1.UpdateWorkerPoolRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloudbuild.v1.UpdateWorkerPoolRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloudbuild.v1.UpdateWorkerPoolRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloudbuild.v1.UpdateWorkerPoolRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static com.google.cloudbuild.v1.UpdateWorkerPoolRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloudbuild.v1.UpdateWorkerPoolRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloudbuild.v1.UpdateWorkerPoolRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloudbuild.v1.UpdateWorkerPoolRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloudbuild.v1.UpdateWorkerPoolRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloudbuild.v1.UpdateWorkerPoolRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloudbuild.v1.UpdateWorkerPoolRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloudbuild.v1.UpdateWorkerPoolRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloudbuild.v1.UpdateWorkerPoolRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request to update a `WorkerPool`. * </pre> * * Protobuf type {@code google.devtools.cloudbuild.v1.UpdateWorkerPoolRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.devtools.cloudbuild.v1.UpdateWorkerPoolRequest) com.google.cloudbuild.v1.UpdateWorkerPoolRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloudbuild.v1.Cloudbuild .internal_static_google_devtools_cloudbuild_v1_UpdateWorkerPoolRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloudbuild.v1.Cloudbuild .internal_static_google_devtools_cloudbuild_v1_UpdateWorkerPoolRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloudbuild.v1.UpdateWorkerPoolRequest.class, com.google.cloudbuild.v1.UpdateWorkerPoolRequest.Builder.class); } // Construct using com.google.cloudbuild.v1.UpdateWorkerPoolRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); 
maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getWorkerPoolFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; workerPool_ = null; if (workerPoolBuilder_ != null) { workerPoolBuilder_.dispose(); workerPoolBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } validateOnly_ = false; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloudbuild.v1.Cloudbuild .internal_static_google_devtools_cloudbuild_v1_UpdateWorkerPoolRequest_descriptor; } @java.lang.Override public com.google.cloudbuild.v1.UpdateWorkerPoolRequest getDefaultInstanceForType() { return com.google.cloudbuild.v1.UpdateWorkerPoolRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloudbuild.v1.UpdateWorkerPoolRequest build() { com.google.cloudbuild.v1.UpdateWorkerPoolRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloudbuild.v1.UpdateWorkerPoolRequest buildPartial() { com.google.cloudbuild.v1.UpdateWorkerPoolRequest result = new com.google.cloudbuild.v1.UpdateWorkerPoolRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloudbuild.v1.UpdateWorkerPoolRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.workerPool_ = workerPoolBuilder_ == null ? workerPool_ : workerPoolBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.validateOnly_ = validateOnly_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloudbuild.v1.UpdateWorkerPoolRequest) { return mergeFrom((com.google.cloudbuild.v1.UpdateWorkerPoolRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloudbuild.v1.UpdateWorkerPoolRequest other) { if (other == com.google.cloudbuild.v1.UpdateWorkerPoolRequest.getDefaultInstance()) return this; if (other.hasWorkerPool()) { mergeWorkerPool(other.getWorkerPool()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } if (other.getValidateOnly() != false) { setValidateOnly(other.getValidateOnly()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getWorkerPoolFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 32: { validateOnly_ = input.readBool(); bitField0_ |= 0x00000004; break; } // case 32 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloudbuild.v1.WorkerPool workerPool_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloudbuild.v1.WorkerPool, com.google.cloudbuild.v1.WorkerPool.Builder, com.google.cloudbuild.v1.WorkerPoolOrBuilder> workerPoolBuilder_; /** * * * <pre> * Required. The `WorkerPool` to update. * * The `name` field is used to identify the `WorkerPool` to update. * Format: `projects/{project}/locations/{location}/workerPools/{workerPool}`. * </pre> * * <code> * .google.devtools.cloudbuild.v1.WorkerPool worker_pool = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the workerPool field is set. */ public boolean hasWorkerPool() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The `WorkerPool` to update. * * The `name` field is used to identify the `WorkerPool` to update. * Format: `projects/{project}/locations/{location}/workerPools/{workerPool}`. 
* </pre> * * <code> * .google.devtools.cloudbuild.v1.WorkerPool worker_pool = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The workerPool. */ public com.google.cloudbuild.v1.WorkerPool getWorkerPool() { if (workerPoolBuilder_ == null) { return workerPool_ == null ? com.google.cloudbuild.v1.WorkerPool.getDefaultInstance() : workerPool_; } else { return workerPoolBuilder_.getMessage(); } } /** * * * <pre> * Required. The `WorkerPool` to update. * * The `name` field is used to identify the `WorkerPool` to update. * Format: `projects/{project}/locations/{location}/workerPools/{workerPool}`. * </pre> * * <code> * .google.devtools.cloudbuild.v1.WorkerPool worker_pool = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setWorkerPool(com.google.cloudbuild.v1.WorkerPool value) { if (workerPoolBuilder_ == null) { if (value == null) { throw new NullPointerException(); } workerPool_ = value; } else { workerPoolBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The `WorkerPool` to update. * * The `name` field is used to identify the `WorkerPool` to update. * Format: `projects/{project}/locations/{location}/workerPools/{workerPool}`. * </pre> * * <code> * .google.devtools.cloudbuild.v1.WorkerPool worker_pool = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setWorkerPool(com.google.cloudbuild.v1.WorkerPool.Builder builderForValue) { if (workerPoolBuilder_ == null) { workerPool_ = builderForValue.build(); } else { workerPoolBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The `WorkerPool` to update. * * The `name` field is used to identify the `WorkerPool` to update. * Format: `projects/{project}/locations/{location}/workerPools/{workerPool}`. 
* </pre> * * <code> * .google.devtools.cloudbuild.v1.WorkerPool worker_pool = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeWorkerPool(com.google.cloudbuild.v1.WorkerPool value) { if (workerPoolBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && workerPool_ != null && workerPool_ != com.google.cloudbuild.v1.WorkerPool.getDefaultInstance()) { getWorkerPoolBuilder().mergeFrom(value); } else { workerPool_ = value; } } else { workerPoolBuilder_.mergeFrom(value); } if (workerPool_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The `WorkerPool` to update. * * The `name` field is used to identify the `WorkerPool` to update. * Format: `projects/{project}/locations/{location}/workerPools/{workerPool}`. * </pre> * * <code> * .google.devtools.cloudbuild.v1.WorkerPool worker_pool = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearWorkerPool() { bitField0_ = (bitField0_ & ~0x00000001); workerPool_ = null; if (workerPoolBuilder_ != null) { workerPoolBuilder_.dispose(); workerPoolBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The `WorkerPool` to update. * * The `name` field is used to identify the `WorkerPool` to update. * Format: `projects/{project}/locations/{location}/workerPools/{workerPool}`. * </pre> * * <code> * .google.devtools.cloudbuild.v1.WorkerPool worker_pool = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloudbuild.v1.WorkerPool.Builder getWorkerPoolBuilder() { bitField0_ |= 0x00000001; onChanged(); return getWorkerPoolFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The `WorkerPool` to update. * * The `name` field is used to identify the `WorkerPool` to update. * Format: `projects/{project}/locations/{location}/workerPools/{workerPool}`. 
* </pre> * * <code> * .google.devtools.cloudbuild.v1.WorkerPool worker_pool = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloudbuild.v1.WorkerPoolOrBuilder getWorkerPoolOrBuilder() { if (workerPoolBuilder_ != null) { return workerPoolBuilder_.getMessageOrBuilder(); } else { return workerPool_ == null ? com.google.cloudbuild.v1.WorkerPool.getDefaultInstance() : workerPool_; } } /** * * * <pre> * Required. The `WorkerPool` to update. * * The `name` field is used to identify the `WorkerPool` to update. * Format: `projects/{project}/locations/{location}/workerPools/{workerPool}`. * </pre> * * <code> * .google.devtools.cloudbuild.v1.WorkerPool worker_pool = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloudbuild.v1.WorkerPool, com.google.cloudbuild.v1.WorkerPool.Builder, com.google.cloudbuild.v1.WorkerPoolOrBuilder> getWorkerPoolFieldBuilder() { if (workerPoolBuilder_ == null) { workerPoolBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloudbuild.v1.WorkerPool, com.google.cloudbuild.v1.WorkerPool.Builder, com.google.cloudbuild.v1.WorkerPoolOrBuilder>( getWorkerPool(), getParentForChildren(), isClean()); workerPool_ = null; } return workerPoolBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * A mask specifying which fields in `worker_pool` to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * A mask specifying which fields in `worker_pool` to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. 
*/ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * A mask specifying which fields in `worker_pool` to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A mask specifying which fields in `worker_pool` to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A mask specifying which fields in `worker_pool` to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * A mask specifying which fields in `worker_pool` to update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * A mask specifying which fields in `worker_pool` to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * A mask specifying which fields in `worker_pool` to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * A mask specifying which fields in `worker_pool` to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } private boolean validateOnly_; /** * * * <pre> * If set, validate the request and preview the response, but do not actually * post it. * </pre> * * <code>bool validate_only = 4;</code> * * @return The validateOnly. 
*/ @java.lang.Override public boolean getValidateOnly() { return validateOnly_; } /** * * * <pre> * If set, validate the request and preview the response, but do not actually * post it. * </pre> * * <code>bool validate_only = 4;</code> * * @param value The validateOnly to set. * @return This builder for chaining. */ public Builder setValidateOnly(boolean value) { validateOnly_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * If set, validate the request and preview the response, but do not actually * post it. * </pre> * * <code>bool validate_only = 4;</code> * * @return This builder for chaining. */ public Builder clearValidateOnly() { bitField0_ = (bitField0_ & ~0x00000004); validateOnly_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.devtools.cloudbuild.v1.UpdateWorkerPoolRequest) } // @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.UpdateWorkerPoolRequest) private static final com.google.cloudbuild.v1.UpdateWorkerPoolRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloudbuild.v1.UpdateWorkerPoolRequest(); } public static com.google.cloudbuild.v1.UpdateWorkerPoolRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateWorkerPoolRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateWorkerPoolRequest>() { @java.lang.Override public UpdateWorkerPoolRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); 
try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateWorkerPoolRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateWorkerPoolRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloudbuild.v1.UpdateWorkerPoolRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/datasketches-java
36,936
src/test/java/org/apache/datasketches/kll/KllItemsSketchTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.datasketches.kll; import static java.lang.foreign.ValueLayout.JAVA_BYTE; import static java.lang.foreign.ValueLayout.JAVA_INT_UNALIGNED; import static java.lang.Math.ceil; import static org.apache.datasketches.kll.KllSketch.SketchStructure.COMPACT_EMPTY; import static org.apache.datasketches.kll.KllSketch.SketchStructure.COMPACT_FULL; import static org.apache.datasketches.kll.KllSketch.SketchStructure.COMPACT_SINGLE; import static org.apache.datasketches.kll.KllSketch.SketchStructure.UPDATABLE; import static org.apache.datasketches.kll.KllSketch.SketchType.ITEMS_SKETCH; import static org.apache.datasketches.quantilescommon.LongsAsOrderableStrings.getString; import static org.apache.datasketches.quantilescommon.QuantileSearchCriteria.EXCLUSIVE; import static org.apache.datasketches.quantilescommon.QuantileSearchCriteria.INCLUSIVE; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertFalse; import static org.testng.Assert.assertNotNull; import static org.testng.Assert.assertTrue; import static org.testng.Assert.fail; import java.lang.foreign.MemorySegment; import java.util.Comparator; import java.util.Random; import 
org.apache.datasketches.common.ArrayOfStringsSerDe; import org.apache.datasketches.common.MemorySegmentStatus; import org.apache.datasketches.common.SketchesArgumentException; import org.apache.datasketches.common.Util; import org.apache.datasketches.kll.KllItemsSketch; import org.apache.datasketches.kll.KllMemorySegmentValidate; import org.apache.datasketches.kll.KllPreambleUtil; import org.apache.datasketches.kll.KllSketch; import org.apache.datasketches.kll.KllSketch.SketchType; import org.apache.datasketches.quantilescommon.GenericSortedView; import org.apache.datasketches.quantilescommon.GenericSortedViewIterator; import org.apache.datasketches.quantilescommon.QuantilesGenericSketchIterator; import org.testng.annotations.Test; public class KllItemsSketchTest { private static final double PMF_EPS_FOR_K_8 = 0.35; // PMF rank error (epsilon) for k=8 private static final double PMF_EPS_FOR_K_128 = 0.025; // PMF rank error (epsilon) for k=128 private static final double PMF_EPS_FOR_K_256 = 0.013; // PMF rank error (epsilon) for k=256 private static final double NUMERIC_NOISE_TOLERANCE = 1E-6; private final ArrayOfStringsSerDe serDe = new ArrayOfStringsSerDe(); @Test public void empty() { final KllItemsSketch<String> sketch = KllItemsSketch.newHeapInstance(Comparator.naturalOrder(), serDe); sketch.update(null); // this must not change anything assertTrue(sketch.isEmpty()); assertEquals(sketch.getN(), 0); assertEquals(sketch.getNumRetained(), 0); try { sketch.getRank("", INCLUSIVE); fail(); } catch (final SketchesArgumentException e) {} try { sketch.getMinItem(); fail(); } catch (final SketchesArgumentException e) {} try { sketch.getMaxItem(); fail(); } catch (final SketchesArgumentException e) {} try { sketch.getQuantile(0.5); fail(); } catch (final SketchesArgumentException e) {} try { sketch.getQuantiles(new double[] {0}); fail(); } catch (final SketchesArgumentException e) {} try { sketch.getPMF(new String[] {""}); fail(); } catch (final SketchesArgumentException 
e) {} try { sketch.getCDF(new String[] {""}); fail(); } catch (final SketchesArgumentException e) {} assertNotNull(sketch.toString(true, true)); assertNotNull(sketch.toString()); } @Test(expectedExceptions = SketchesArgumentException.class) public void getQuantileInvalidArg() { final KllItemsSketch<String> sketch = KllItemsSketch.newHeapInstance(Comparator.naturalOrder(), serDe); sketch.update("A"); sketch.getQuantile(-1.0); } @Test(expectedExceptions = SketchesArgumentException.class) public void getQuantilesInvalidArg() { final KllItemsSketch<String> sketch = KllItemsSketch.newHeapInstance(Comparator.naturalOrder(), serDe); sketch.update("A"); sketch.getQuantiles(new double[] {2.0}); } @Test public void oneValue() { final KllItemsSketch<String> sketch = KllItemsSketch.newHeapInstance(Comparator.naturalOrder(), serDe); sketch.update("A"); assertFalse(sketch.isEmpty()); assertEquals(sketch.getN(), 1); assertEquals(sketch.getNumRetained(), 1); assertEquals(sketch.getRank("A", EXCLUSIVE), 0.0); assertEquals(sketch.getRank("B", EXCLUSIVE), 1.0); assertEquals(sketch.getRank("A", EXCLUSIVE), 0.0); assertEquals(sketch.getRank("B", EXCLUSIVE), 1.0); assertEquals(sketch.getRank("@", INCLUSIVE), 0.0); assertEquals(sketch.getRank("A", INCLUSIVE), 1.0); assertEquals(sketch.getMinItem(),"A"); assertEquals(sketch.getMaxItem(), "A"); assertEquals(sketch.getQuantile(0.5, EXCLUSIVE), "A"); assertEquals(sketch.getQuantile(0.5, INCLUSIVE), "A"); } @Test public void tenValues() { final String[] tenStr = {"A","B","C","D","E","F","G","H","I","J"}; final KllItemsSketch<String> sketch = KllItemsSketch.newHeapInstance(20, Comparator.naturalOrder(), serDe); final int strLen = tenStr.length; final double dblStrLen = strLen; for (int i = 1; i <= strLen; i++) { sketch.update(tenStr[i - 1]); } assertFalse(sketch.isEmpty()); assertEquals(sketch.getN(), strLen); assertEquals(sketch.getNumRetained(), strLen); for (int i = 1; i <= strLen; i++) { assertEquals(sketch.getRank(tenStr[i - 1], 
EXCLUSIVE), (i - 1) / dblStrLen); assertEquals(sketch.getRank(tenStr[i - 1], INCLUSIVE), i / dblStrLen); } final String[] qArr = tenStr; double[] rOut = sketch.getRanks(qArr); //inclusive for (int i = 0; i < qArr.length; i++) { assertEquals(rOut[i], (i + 1) / dblStrLen); } rOut = sketch.getRanks(qArr, EXCLUSIVE); //exclusive for (int i = 0; i < qArr.length; i++) { assertEquals(rOut[i], i / 10.0); } for (int i = 0; i <= strLen; i++) { final double rank = i/dblStrLen; String q = rank == 1.0 ? tenStr[i-1] : tenStr[i]; assertEquals(sketch.getQuantile(rank, EXCLUSIVE), q); q = rank == 0 ? tenStr[i] : tenStr[i - 1]; assertEquals(sketch.getQuantile(rank, INCLUSIVE), q); //ERROR } { // getQuantile() and getQuantiles() equivalence EXCLUSIVE final String[] quantiles = sketch.getQuantiles(new double[] {0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0}, EXCLUSIVE); for (int i = 0; i <= 10; i++) { assertEquals(sketch.getQuantile(i / 10.0, EXCLUSIVE), quantiles[i]); } } { // getQuantile() and getQuantiles() equivalence INCLUSIVE final String[] quantiles = sketch.getQuantiles(new double[] {0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1}, INCLUSIVE); for (int i = 0; i <= 10; i++) { assertEquals(sketch.getQuantile(i / 10.0, INCLUSIVE), quantiles[i]); } } } @Test public void manyValuesEstimationMode() { final KllItemsSketch<String> sketch = KllItemsSketch.newHeapInstance(Comparator.naturalOrder(), serDe); final int n = 1_000_000; final int digits = Util.numDigits(n); for (int i = 1; i <= n; i++) { sketch.update(Util.longToFixedLengthString(i, digits)); assertEquals(sketch.getN(), i); } // test getRank for (int i = 1; i <= n; i++) { final double trueRank = (double) i / n; final String s = Util.longToFixedLengthString(i, digits); final double r = sketch.getRank(s); assertEquals(r, trueRank, PMF_EPS_FOR_K_256, "for value " + s); } // test getPMF final String s = Util.longToFixedLengthString(n/2, digits); final double[] pmf = sketch.getPMF(new String[] {s}); // split at median 
assertEquals(pmf.length, 2); assertEquals(pmf[0], 0.5, PMF_EPS_FOR_K_256); assertEquals(pmf[1], 0.5, PMF_EPS_FOR_K_256); assertEquals(sketch.getMinItem(), Util.longToFixedLengthString(1, digits)); assertEquals(sketch.getMaxItem(), Util.longToFixedLengthString(n, digits)); // check at every 0.1 percentage point final double[] fractions = new double[1001]; final double[] reverseFractions = new double[1001]; // check that ordering doesn't matter for (int i = 0; i <= 1000; i++) { fractions[i] = (double) i / 1000; reverseFractions[1000 - i] = fractions[i]; } final String[] quantiles = sketch.getQuantiles(fractions); final String[] reverseQuantiles = sketch.getQuantiles(reverseFractions); String previousQuantile = ""; for (int i = 0; i <= 1000; i++) { final String quantile = sketch.getQuantile(fractions[i]); assertEquals(quantile, quantiles[i]); assertEquals(quantile, reverseQuantiles[1000 - i]); assertTrue(Util.le(previousQuantile, quantile, Comparator.naturalOrder())); previousQuantile = quantile; } } @Test public void getRankGetCdfGetPmfConsistency() { final KllItemsSketch<String> sketch = KllItemsSketch.newHeapInstance(Comparator.naturalOrder(), serDe); final int n = 1000; final int digits = Util.numDigits(n); final String[] quantiles = new String[n]; for (int i = 0; i < n; i++) { final String str = Util.longToFixedLengthString(i, digits); sketch.update(str); quantiles[i] = str; } { //EXCLUSIVE final double[] ranks = sketch.getCDF(quantiles, EXCLUSIVE); final double[] pmf = sketch.getPMF(quantiles, EXCLUSIVE); double sumPmf = 0; for (int i = 0; i < n; i++) { assertEquals(ranks[i], sketch.getRank(quantiles[i], EXCLUSIVE), NUMERIC_NOISE_TOLERANCE, "rank vs CDF for value " + i); sumPmf += pmf[i]; assertEquals(ranks[i], sumPmf, NUMERIC_NOISE_TOLERANCE, "CDF vs PMF for value " + i); } sumPmf += pmf[n]; assertEquals(sumPmf, 1.0, NUMERIC_NOISE_TOLERANCE); assertEquals(ranks[n], 1.0, NUMERIC_NOISE_TOLERANCE); } { // INCLUSIVE (default) final double[] ranks = 
sketch.getCDF(quantiles, INCLUSIVE); final double[] pmf = sketch.getPMF(quantiles, INCLUSIVE); double sumPmf = 0; for (int i = 0; i < n; i++) { assertEquals(ranks[i], sketch.getRank(quantiles[i], INCLUSIVE), NUMERIC_NOISE_TOLERANCE, "rank vs CDF for value " + i); sumPmf += pmf[i]; assertEquals(ranks[i], sumPmf, NUMERIC_NOISE_TOLERANCE, "CDF vs PMF for value " + i); } sumPmf += pmf[n]; assertEquals(sumPmf, 1.0, NUMERIC_NOISE_TOLERANCE); assertEquals(ranks[n], 1.0, NUMERIC_NOISE_TOLERANCE); } } @Test public void merge() { final KllItemsSketch<String> sketch1 = KllItemsSketch.newHeapInstance(Comparator.naturalOrder(), serDe); final KllItemsSketch<String> sketch2 = KllItemsSketch.newHeapInstance(Comparator.naturalOrder(), serDe); final int n = 10000; final int digits = Util.numDigits(2 * n); for (int i = 0; i < n; i++) { sketch1.update(Util.longToFixedLengthString(i, digits)); sketch2.update(Util.longToFixedLengthString((2 * n) - i - 1, digits)); } assertEquals(sketch1.getMinItem(), Util.longToFixedLengthString(0, digits)); assertEquals(sketch1.getMaxItem(), Util.longToFixedLengthString(n - 1, digits)); assertEquals(sketch2.getMinItem(), Util.longToFixedLengthString(n, digits)); assertEquals(sketch2.getMaxItem(), Util.longToFixedLengthString((2 * n) - 1, digits)); sketch1.merge(sketch2); assertFalse(sketch1.isEmpty()); assertEquals(sketch1.getN(), 2L * n); assertEquals(sketch1.getMinItem(), Util.longToFixedLengthString(0, digits)); assertEquals(sketch1.getMaxItem(), Util.longToFixedLengthString((2 * n) - 1, digits)); final String upperBound = Util.longToFixedLengthString(n + (int)ceil(n * PMF_EPS_FOR_K_256), digits); final String lowerBound = Util.longToFixedLengthString(n - (int)ceil(n * PMF_EPS_FOR_K_256), digits); final String median = sketch1.getQuantile(0.5); assertTrue(Util.le(median, upperBound, Comparator.naturalOrder())); assertTrue(Util.le(lowerBound, median, Comparator.naturalOrder())); } @Test public void mergeLowerK() { final KllItemsSketch<String> sketch1 
= KllItemsSketch.newHeapInstance(256, Comparator.naturalOrder(), serDe); final KllItemsSketch<String> sketch2 = KllItemsSketch.newHeapInstance(128, Comparator.naturalOrder(), serDe); final int n = 10000; final int digits = Util.numDigits(2 * n); for (int i = 0; i < n; i++) { sketch1.update(Util.longToFixedLengthString(i, digits)); sketch2.update(Util.longToFixedLengthString((2 * n) - i - 1, digits)); } assertEquals(sketch1.getMinItem(), Util.longToFixedLengthString(0, digits)); assertEquals(sketch1.getMaxItem(), Util.longToFixedLengthString(n - 1, digits)); assertEquals(sketch2.getMinItem(), Util.longToFixedLengthString(n, digits)); assertEquals(sketch2.getMaxItem(), Util.longToFixedLengthString((2 * n) - 1, digits)); assertTrue(sketch1.getNormalizedRankError(false) < sketch2.getNormalizedRankError(false)); assertTrue(sketch1.getNormalizedRankError(true) < sketch2.getNormalizedRankError(true)); sketch1.merge(sketch2); // sketch1 must get "contaminated" by the lower K in sketch2 assertEquals(sketch1.getNormalizedRankError(false), sketch2.getNormalizedRankError(false)); assertEquals(sketch1.getNormalizedRankError(true), sketch2.getNormalizedRankError(true)); assertFalse(sketch1.isEmpty()); assertEquals(sketch1.getN(), 2 * n); assertEquals(sketch1.getMinItem(), Util.longToFixedLengthString(0, digits)); assertEquals(sketch1.getMaxItem(), Util.longToFixedLengthString((2 * n) - 1, digits)); final String upperBound = Util.longToFixedLengthString(n + (int)ceil(2 * n * PMF_EPS_FOR_K_128), digits); final String lowerBound = Util.longToFixedLengthString(n - (int)ceil(2 * n * PMF_EPS_FOR_K_128), digits); final String median = sketch1.getQuantile(0.5); assertTrue(Util.le(median, upperBound, Comparator.naturalOrder())); assertTrue(Util.le(lowerBound, median, Comparator.naturalOrder())); } @Test public void mergeEmptyLowerK() { final KllItemsSketch<String> sketch1 = KllItemsSketch.newHeapInstance(256, Comparator.naturalOrder(), serDe); final KllItemsSketch<String> sketch2 = 
KllItemsSketch.newHeapInstance(128, Comparator.naturalOrder(), serDe); final int n = 10000; final int digits = Util.numDigits(n); for (int i = 0; i < n; i++) { sketch1.update(Util.longToFixedLengthString(i, digits)); //sketch2 is empty } // rank error should not be affected by a merge with an empty sketch with lower K final double rankErrorBeforeMerge = sketch1.getNormalizedRankError(true); sketch1.merge(sketch2); assertEquals(sketch1.getNormalizedRankError(true), rankErrorBeforeMerge); { assertFalse(sketch1.isEmpty()); assertTrue(sketch2.isEmpty()); assertEquals(sketch1.getN(), n); assertEquals(sketch1.getMinItem(), Util.longToFixedLengthString(0, digits)); assertEquals(sketch1.getMaxItem(), Util.longToFixedLengthString(n - 1, digits)); final String upperBound = Util.longToFixedLengthString((n / 2) + (int)ceil(n * PMF_EPS_FOR_K_256), digits); final String lowerBound = Util.longToFixedLengthString((n / 2) - (int)ceil(n * PMF_EPS_FOR_K_256), digits); final String median = sketch1.getQuantile(0.5); assertTrue(Util.le(median, upperBound, Comparator.naturalOrder())); assertTrue(Util.le(lowerBound, median, Comparator.naturalOrder())); } { //merge the other way sketch2.merge(sketch1); assertFalse(sketch1.isEmpty()); assertFalse(sketch2.isEmpty()); assertEquals(sketch1.getN(), n); assertEquals(sketch2.getN(), n); assertEquals(sketch1.getMinItem(), Util.longToFixedLengthString(0, digits)); assertEquals(sketch1.getMaxItem(), Util.longToFixedLengthString(n - 1, digits)); assertEquals(sketch2.getMinItem(), Util.longToFixedLengthString(0, digits)); assertEquals(sketch2.getMaxItem(), Util.longToFixedLengthString(n - 1, digits)); final String upperBound = Util.longToFixedLengthString((n / 2) + (int)ceil(n * PMF_EPS_FOR_K_128), digits); final String lowerBound = Util.longToFixedLengthString((n / 2) - (int)ceil(n * PMF_EPS_FOR_K_128), digits); final String median = sketch2.getQuantile(0.5); assertTrue(Util.le(median, upperBound, Comparator.naturalOrder())); 
assertTrue(Util.le(lowerBound, median, Comparator.naturalOrder())); } } @Test public void mergeExactModeLowerK() { final KllItemsSketch<String> sketch1 = KllItemsSketch.newHeapInstance(256, Comparator.naturalOrder(), serDe); final KllItemsSketch<String> sketch2 = KllItemsSketch.newHeapInstance(128, Comparator.naturalOrder(), serDe); final int n = 10000; final int digits = Util.numDigits(n); for (int i = 0; i < n; i++) { sketch1.update(Util.longToFixedLengthString(i, digits)); } sketch2.update(Util.longToFixedLengthString(1, digits)); // rank error should not be affected by a merge with a sketch in exact mode with lower K final double rankErrorBeforeMerge = sketch1.getNormalizedRankError(true); sketch1.merge(sketch2); assertEquals(sketch1.getNormalizedRankError(true), rankErrorBeforeMerge); } @Test public void mergeMinMinValueFromOther() { final KllItemsSketch<String> sketch1 = KllItemsSketch.newHeapInstance(Comparator.naturalOrder(), serDe); final KllItemsSketch<String> sketch2 = KllItemsSketch.newHeapInstance(Comparator.naturalOrder(), serDe); sketch1.update(Util.longToFixedLengthString(1, 1)); sketch2.update(Util.longToFixedLengthString(2, 1)); sketch2.merge(sketch1); assertEquals(sketch2.getMinItem(), Util.longToFixedLengthString(1, 1)); } @Test public void mergeMinAndMaxFromOther() { final KllItemsSketch<String> sketch1 = KllItemsSketch.newHeapInstance(Comparator.naturalOrder(), serDe); final KllItemsSketch<String> sketch2 = KllItemsSketch.newHeapInstance(10, Comparator.naturalOrder(), serDe); final int n = 1_000_000; final int digits = Util.numDigits(n); for (int i = 1; i <= 1_000_000; i++) { sketch1.update(Util.longToFixedLengthString(i, digits)); //sketch2 is empty } sketch2.merge(sketch1); assertEquals(sketch2.getMinItem(), Util.longToFixedLengthString(1, digits)); assertEquals(sketch2.getMaxItem(), Util.longToFixedLengthString(n, digits)); } @Test(expectedExceptions = SketchesArgumentException.class) public void kTooSmall() { 
KllItemsSketch.newHeapInstance(KllSketch.DEFAULT_M - 1, Comparator.naturalOrder(), serDe); } @Test(expectedExceptions = SketchesArgumentException.class) public void kTooLarge() { KllItemsSketch.newHeapInstance(KllSketch.MAX_K + 1, Comparator.naturalOrder(), serDe); } @Test public void minK() { final KllItemsSketch<String> sketch = KllItemsSketch.newHeapInstance(KllSketch.DEFAULT_M,Comparator.naturalOrder(), serDe); final int n = 1000; final int digits = Util.numDigits(n); for (int i = 0; i < n; i++) { sketch.update(Util.longToFixedLengthString(i, digits)); } assertEquals(sketch.getK(), KllSketch.DEFAULT_M); final String upperBound = Util.longToFixedLengthString((n / 2) + (int)ceil(n * PMF_EPS_FOR_K_8), digits); final String lowerBound = Util.longToFixedLengthString((n / 2) - (int)ceil(n * PMF_EPS_FOR_K_8), digits); final String median = sketch.getQuantile(0.5); assertTrue(Util.le(median, upperBound, Comparator.naturalOrder())); assertTrue(Util.le(lowerBound, median, Comparator.naturalOrder())); } @Test public void maxK() { final KllItemsSketch<String> sketch = KllItemsSketch.newHeapInstance(KllSketch.MAX_K,Comparator.naturalOrder(), serDe); final int n = 1000; final int digits = Util.numDigits(n); for (int i = 0; i < n; i++) { sketch.update(Util.longToFixedLengthString(i, digits)); } assertEquals(sketch.getK(), KllSketch.MAX_K); final String upperBound = Util.longToFixedLengthString((n / 2) + (int)ceil(n * PMF_EPS_FOR_K_256), digits); final String lowerBound = Util.longToFixedLengthString((n / 2) - (int)ceil(n * PMF_EPS_FOR_K_256), digits); final String median = sketch.getQuantile(0.5); assertTrue(Util.le(median, upperBound, Comparator.naturalOrder())); assertTrue(Util.le(lowerBound, median, Comparator.naturalOrder())); } @Test(expectedExceptions = SketchesArgumentException.class) public void outOfOrderSplitPoints() { final KllItemsSketch<String> sketch = KllItemsSketch.newHeapInstance(Comparator.naturalOrder(), serDe); final String s0 = 
Util.longToFixedLengthString(0, 1); final String s1 = Util.longToFixedLengthString(1, 1); sketch.update(s0); sketch.getCDF(new String[] {s1, s0}); } @Test(expectedExceptions = SketchesArgumentException.class) public void nullSplitPoint() { final KllItemsSketch<String> sketch = KllItemsSketch.newHeapInstance(Comparator.naturalOrder(), serDe); sketch.update(Util.longToFixedLengthString(0, 1)); sketch.getCDF(new String[] {null}); } @Test public void checkReset() { final KllItemsSketch<String> sketch = KllItemsSketch.newHeapInstance(20, Comparator.naturalOrder(), serDe); final int n = 100; final int digits = Util.numDigits(n); for (int i = 1; i <= n; i++) { sketch.update(Util.longToFixedLengthString(i, digits)); } final long n1 = sketch.getN(); final String min1 = sketch.getMinItem(); final String max1 = sketch.getMaxItem(); sketch.reset(); for (int i = 1; i <= 100; i++) { sketch.update(Util.longToFixedLengthString(i, digits)); } final long n2 = sketch.getN(); final String min2 = sketch.getMinItem(); final String max2 = sketch.getMaxItem(); assertEquals(n2, n1); assertEquals(min2, min1); assertEquals(max2, max1); } @Test public void checkReadOnlyUpdate() { final KllItemsSketch<String> sk1 = KllItemsSketch.newHeapInstance(20, Comparator.naturalOrder(), serDe); final MemorySegment seg = MemorySegment.ofArray(sk1.toByteArray()); final KllItemsSketch<String> sk2 = KllItemsSketch.wrap(seg, Comparator.naturalOrder(), serDe); try { sk2.update("A"); fail(); } catch (final SketchesArgumentException e) { } } @Test public void checkNewDirectInstanceAndSmallSize() { final KllItemsSketch<String> sk1 = KllItemsSketch.newHeapInstance(20, Comparator.naturalOrder(), serDe); MemorySegment seg = MemorySegment.ofArray(sk1.toByteArray()); KllItemsSketch<String> sk2 = KllItemsSketch.wrap(seg, Comparator.naturalOrder(), serDe); int sizeBytes = sk2.currentSerializedSizeBytes(false); assertEquals(sizeBytes, 8); sk1.update("A"); seg = MemorySegment.ofArray(sk1.toByteArray()); sk2 = 
KllItemsSketch.wrap(seg, Comparator.naturalOrder(), serDe); sizeBytes = sk2.currentSerializedSizeBytes(false); assertEquals(sizeBytes, 8 + 5); sk1.update("B"); seg = MemorySegment.ofArray(sk1.toByteArray()); sk2 = KllItemsSketch.wrap(seg, Comparator.naturalOrder(), serDe); sizeBytes = sk2.currentSerializedSizeBytes(false); assertEquals(sizeBytes, 20 + 4 + (2 * 5) + (2 * 5)); } @Test public void sortedView() { final KllItemsSketch<String> sk = KllItemsSketch.newHeapInstance(20, Comparator.naturalOrder(), serDe); sk.update("A"); sk.update("AB"); sk.update("ABC"); final GenericSortedView<String> view = sk.getSortedView(); final GenericSortedViewIterator<String> itr = view.iterator(); assertEquals(itr.next(), true); assertEquals(itr.getQuantile(), "A"); assertEquals(itr.getWeight(), 1); assertEquals(itr.getNaturalRank(EXCLUSIVE), 0); assertEquals(itr.getNaturalRank(INCLUSIVE), 1); assertEquals(itr.next(), true); assertEquals(itr.getQuantile(), "AB"); assertEquals(itr.getWeight(), 1); assertEquals(itr.getNaturalRank(EXCLUSIVE), 1); assertEquals(itr.getNaturalRank(INCLUSIVE), 2); assertEquals(itr.next(), true); assertEquals(itr.getQuantile(), "ABC"); assertEquals(itr.getWeight(), 1); assertEquals(itr.getNaturalRank(EXCLUSIVE), 2); assertEquals(itr.getNaturalRank(INCLUSIVE), 3); assertEquals(itr.next(), false); } @Test //also visual public void checkCDF_PDF() { final double[] cdfI = {.25, .50, .75, 1.0, 1.0 }; final double[] cdfE = {0.0, .25, .50, .75, 1.0 }; final double[] pmfI = {.25, .25, .25, .25, 0.0 }; final double[] pmfE = {0.0, .25, .25, .25, .25 }; final double toll = 1E-10; final KllItemsSketch<String> sketch = KllItemsSketch.newHeapInstance(20, Comparator.naturalOrder(), serDe); final String[] strIn = {"A", "AB", "ABC", "ABCD"}; for (int i = 0; i < strIn.length; i++) { sketch.update(strIn[i]); } final String[] sp = {"A", "AB", "ABC", "ABCD"}; println("SplitPoints:"); for (int i = 0; i < sp.length; i++) { printf("%10s", sp[i]); } println(""); 
println("INCLUSIVE:"); double[] cdf = sketch.getCDF(sp, INCLUSIVE); double[] pmf = sketch.getPMF(sp, INCLUSIVE); printf("%10s%10s\n", "CDF", "PMF"); for (int i = 0; i < cdf.length; i++) { printf("%10.2f%10.2f\n", cdf[i], pmf[i]); assertEquals(cdf[i], cdfI[i], toll); assertEquals(pmf[i], pmfI[i], toll); } println("EXCLUSIVE"); cdf = sketch.getCDF(sp, EXCLUSIVE); pmf = sketch.getPMF(sp, EXCLUSIVE); printf("%10s%10s\n", "CDF", "PMF"); for (int i = 0; i < cdf.length; i++) { printf("%10.2f%10.2f\n", cdf[i], pmf[i]); assertEquals(cdf[i], cdfE[i], toll); assertEquals(pmf[i], pmfE[i], toll); } } @Test public void checkWrapCase1Items() { final KllItemsSketch<String> sk = KllItemsSketch.newHeapInstance(20, Comparator.naturalOrder(), serDe); final int n = 21; final int digits = Util.numDigits(n); for (int i = 1; i <= n; i++) { sk.update(Util.longToFixedLengthString(i, digits)); } final MemorySegment seg = MemorySegment.ofArray(sk.toByteArray()).asReadOnly(); final KllItemsSketch<String> sk2 = KllItemsSketch.wrap(seg, Comparator.naturalOrder(), serDe); assertTrue(seg.isReadOnly()); assertTrue(sk2.isReadOnly()); assertFalse(sk2.isOffHeap()); } @Test public void checkReadOnlyExceptions() { final int[] intArr = {}; final int intV = 2; final int idx = 1; final KllItemsSketch<String> sk1 = KllItemsSketch.newHeapInstance(20, Comparator.naturalOrder(), serDe); final MemorySegment seg = MemorySegment.ofArray(sk1.toByteArray()); final KllItemsSketch<String> sk2 = KllItemsSketch.wrap(seg, Comparator.naturalOrder(), serDe); try { sk2.setLevelsArray(intArr); fail(); } catch (final SketchesArgumentException e) { } try { sk2.setLevelsArrayAt(idx,intV); fail(); } catch (final SketchesArgumentException e) { } } @Test public void checkIsSameResource() { final int cap = 128; final MemorySegment wseg = MemorySegment.ofArray(new byte[cap]); //heap final MemorySegment slice1 = wseg.asSlice(0, 64); final MemorySegment slice2 = wseg.asSlice(64, 64); assertFalse(slice1 == slice2); 
assertFalse(MemorySegmentStatus.isSameResource(slice1, slice2)); //same original resource, but different offsets final MemorySegment slice3 = wseg.asSlice(0, 64); assertFalse(slice1 == slice3); assertTrue(MemorySegmentStatus.isSameResource(slice1, slice3)); //same original resource, same offsets, different views. slice1.set(JAVA_INT_UNALIGNED, 0, -1); assertEquals(-1, slice3.get(JAVA_INT_UNALIGNED, 0)); //proof slice1.set(JAVA_INT_UNALIGNED, 0, 0); final byte[] byteArr1 = KllItemsSketch.newHeapInstance(20, Comparator.naturalOrder(), serDe).toByteArray(); MemorySegment.copy(byteArr1, 0, slice1, JAVA_BYTE, 0, byteArr1.length); final KllItemsSketch<String> sk1 = KllItemsSketch.wrap(slice1, Comparator.naturalOrder(), serDe); final byte[] byteArr2 = KllItemsSketch.newHeapInstance(20, Comparator.naturalOrder(), serDe).toByteArray(); MemorySegment.copy(byteArr2, 0, slice2, JAVA_BYTE, 0, byteArr2.length); assertFalse(sk1.isSameResource(slice2)); //same original resource, but different offsets final byte[] byteArr3 = KllItemsSketch.newHeapInstance(20, Comparator.naturalOrder(), serDe).toByteArray(); MemorySegment.copy(byteArr3, 0, slice3, JAVA_BYTE, 0, byteArr3.length); assertTrue(sk1.isSameResource(slice3)); //same original resource, same offsets, different views. 
} // New added tests specially for KllItemsSketch @Test public void checkHeapifyEmpty() { final KllItemsSketch<String> sk1 = KllItemsSketch.newHeapInstance(20, Comparator.naturalOrder(), serDe); final MemorySegment seg = MemorySegment.ofArray(sk1.toByteArray()); final KllMemorySegmentValidate segVal = new KllMemorySegmentValidate(seg, SketchType.ITEMS_SKETCH, serDe); assertEquals(segVal.sketchStructure, COMPACT_EMPTY); assertEquals(seg.byteSize(), 8); final KllItemsSketch<String> sk2 = KllItemsSketch.heapify(seg, Comparator.naturalOrder(), serDe); assertEquals(sk2.sketchStructure, UPDATABLE); assertEquals(sk2.getN(), 0); assertFalse(sk2.isReadOnly()); try { sk2.getMinItem(); fail(); } catch (final SketchesArgumentException e) { } try { sk2.getMaxItem(); fail(); } catch (final SketchesArgumentException e) { } println(sk1.toString(true, true)); println(""); println(KllPreambleUtil.toString(seg, ITEMS_SKETCH, true, serDe)); } @Test public void checkHeapifySingleItem() { final KllItemsSketch<String> sk1 = KllItemsSketch.newHeapInstance(20, Comparator.naturalOrder(), serDe); sk1.update("A"); final MemorySegment seg = MemorySegment.ofArray(sk1.toByteArray()); final KllMemorySegmentValidate segVal = new KllMemorySegmentValidate(seg, SketchType.ITEMS_SKETCH, serDe); assertEquals(segVal.sketchStructure, COMPACT_SINGLE); assertEquals(seg.byteSize(), segVal.sketchBytes); final KllItemsSketch<String> sk2 = KllItemsSketch.heapify(seg, Comparator.naturalOrder(), serDe); assertEquals(sk2.sketchStructure, UPDATABLE); assertEquals(sk2.getN(), 1); assertFalse(sk2.isReadOnly()); assertEquals(sk2.getMinItem(), "A"); assertEquals(sk2.getMaxItem(), "A"); println(sk1.toString(true, true)); println(""); println(KllPreambleUtil.toString(seg, ITEMS_SKETCH, true, serDe)); } @Test public void checkHeapifyFewItems() { final KllItemsSketch<String> sk1 = KllItemsSketch.newHeapInstance(20, Comparator.naturalOrder(), serDe); sk1.update("A"); sk1.update("AB"); sk1.update("ABC"); final MemorySegment 
seg = MemorySegment.ofArray(sk1.toByteArray()); final KllMemorySegmentValidate segVal = new KllMemorySegmentValidate(seg, SketchType.ITEMS_SKETCH, serDe); assertEquals(segVal.sketchStructure, COMPACT_FULL); assertEquals(seg.byteSize(), segVal.sketchBytes); println(sk1.toString(true, true)); println(""); println(KllPreambleUtil.toString(seg, ITEMS_SKETCH, true, serDe)); } @Test public void checkHeapifyManyItems() { final KllItemsSketch<String> sk1 = KllItemsSketch.newHeapInstance(20, Comparator.naturalOrder(), serDe); final int n = 109; final int digits = Util.numDigits(n); for (int i = 1; i <= n; i++) { sk1.update(Util.longToFixedLengthString(i, digits)); } final MemorySegment seg = MemorySegment.ofArray(sk1.toByteArray()); final KllMemorySegmentValidate segVal = new KllMemorySegmentValidate(seg, SketchType.ITEMS_SKETCH, serDe); assertEquals(segVal.sketchStructure, COMPACT_FULL); assertEquals(seg.byteSize(), segVal.sketchBytes); println(sk1.toString(true, true)); println(""); println(KllPreambleUtil.toString(seg, ITEMS_SKETCH, true, serDe)); } @Test public void checkWrapCausingLevelsCompaction() { final KllItemsSketch<String> sk1 = KllItemsSketch.newHeapInstance(20, Comparator.naturalOrder(), serDe); final int n = 109; final int digits = Util.numDigits(n); for (int i = 1; i <= n; i++) { sk1.update(Util.longToFixedLengthString(i, digits)); } final MemorySegment seg = MemorySegment.ofArray(sk1.toByteArray()).asReadOnly(); final KllItemsSketch<String> sk2 = KllItemsSketch.wrap(seg, Comparator.naturalOrder(), serDe); assertTrue(seg.isReadOnly()); assertTrue(sk2.isReadOnly()); assertFalse(sk2.isOffHeap()); //not off-heap println(sk1.toString(true, true)); println(""); println(sk2.toString(true, true)); println(""); println(KllPreambleUtil.toString(seg, ITEMS_SKETCH, true, serDe)); } @Test public void checkExceptions() { final KllItemsSketch<String> sk = KllItemsSketch.newHeapInstance(20, Comparator.naturalOrder(), serDe); try { sk.getTotalItemsByteArr(); fail(); } catch 
(final SketchesArgumentException e) { } try { sk.getTotalItemsNumBytes(); fail(); } catch (final SketchesArgumentException e) { } try { sk.setMemorySegment(null); fail(); } catch (final SketchesArgumentException e) { } final byte[] byteArr = sk.toByteArray(); final KllItemsSketch<String> sk2 = KllItemsSketch.wrap(MemorySegment.ofArray(byteArr), Comparator.naturalOrder(), serDe); try { sk2.incN(1); fail(); } catch (final SketchesArgumentException e) { } try { sk2.setItemsArray(null); fail(); } catch (final SketchesArgumentException e) { } try { sk2.setItemsArrayAt(0, null); fail(); } catch (final SketchesArgumentException e) { } try { sk2.setLevelZeroSorted(false); fail(); } catch (final SketchesArgumentException e) { } try { sk2.setMaxItem(null); fail(); } catch (final SketchesArgumentException e) { } try { sk2.setMinItem(null); fail(); } catch (final SketchesArgumentException e) { } try { sk2.setMinK(0); fail(); } catch (final SketchesArgumentException e) { } try { sk2.setN(0); fail(); } catch (final SketchesArgumentException e) { } } @Test public void checkSortedViewAfterReset() { final KllItemsSketch<String> sk = KllItemsSketch.newHeapInstance(20, Comparator.naturalOrder(), serDe); sk.update("1"); final GenericSortedView<String> sv = sk.getSortedView(); final String ssv = sv.getQuantile(1.0, INCLUSIVE); assertEquals(ssv, "1"); sk.reset(); try { sk.getSortedView(); fail(); } catch (final SketchesArgumentException e) { } } @Test //There is no guarantee that L0 is sorted after a merge. //The issue is, during a merge, L0 must be sorted prior to a compaction to a higher level. //Otherwise the higher levels would not be sorted properly. 
public void checkL0SortDuringMergeIssue527() throws NumberFormatException { final Random rand = new Random(); final KllItemsSketch<String> sk1 = KllItemsSketch.newHeapInstance(8, Comparator.reverseOrder(), serDe); final KllItemsSketch<String> sk2 = KllItemsSketch.newHeapInstance(8, Comparator.reverseOrder(), serDe); final int n = 26; //don't change this for (int i = 1; i <= n; i++ ) { final int j = rand.nextInt(n) + 1; sk1.update(getString(j, 3)); sk2.update(getString(j +100, 3)); } sk1.merge(sk2); println(sk1.toString(true, true)); //L1 and above should be sorted in reverse. Ignore L0. final int lvl1size = sk1.levelsArr[2] - sk1.levelsArr[1]; final QuantilesGenericSketchIterator<String> itr = sk1.iterator(); itr.next(); int prev = Integer.parseInt(itr.getQuantile().trim()); for (int i = 1; i < lvl1size; i++) { if (itr.next()) { final int v = Integer.parseInt(itr.getQuantile().trim()); assertTrue(v <= prev); prev = v; } } } private final static boolean enablePrinting = false; /** * @param format the format * @param args the args */ private static final void printf(final String format, final Object ...args) { if (enablePrinting) { System.out.printf(format, args); } } /** * @param o the Object to println */ private static final void println(final Object o) { if (enablePrinting) { System.out.println(o.toString()); } } }
oracle/graal
36,924
truffle/src/com.oracle.truffle.dsl.processor/src/com/oracle/truffle/dsl/processor/bytecode/model/BytecodeDSLBuiltins.java
/* * Copyright (c) 2024, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * The Universal Permissive License (UPL), Version 1.0 * * Subject to the condition set forth below, permission is hereby granted to any * person obtaining a copy of this software, associated documentation and/or * data (collectively the "Software"), free of charge and under any and all * copyright rights in the Software, and any and all patent rights owned or * freely licensable by each licensor hereunder covering either (i) the * unmodified Software as contributed to or provided by such licensor, or (ii) * the Larger Works (as defined below), to deal in both * * (a) the Software, and * * (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if * one is included with the Software each a "Larger Work" to which the Software * is contributed by such licensors), * * without restriction, including without limitation the rights to copy, create * derivative works of, display, perform, and distribute the Software and make, * use, sell, offer for sale, import, export, have made, and have sold the * Software and the Larger Work(s), and to sublicense the foregoing rights on * either these or other terms. * * This license is subject to the following condition: * * The above copyright notice and either this complete permission notice or at a * minimum a reference to the UPL must be included in all copies or substantial * portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ package com.oracle.truffle.dsl.processor.bytecode.model; import static com.oracle.truffle.dsl.processor.bytecode.model.InstructionModel.OPCODE_WIDTH; import java.util.List; import javax.lang.model.type.TypeMirror; import com.oracle.truffle.dsl.processor.ProcessorContext; import com.oracle.truffle.dsl.processor.TruffleTypes; import com.oracle.truffle.dsl.processor.bytecode.model.InstructionModel.ImmediateKind; import com.oracle.truffle.dsl.processor.bytecode.model.InstructionModel.InstructionKind; import com.oracle.truffle.dsl.processor.bytecode.model.OperationModel.OperationArgument; import com.oracle.truffle.dsl.processor.bytecode.model.OperationModel.OperationArgument.Encoding; import com.oracle.truffle.dsl.processor.bytecode.model.OperationModel.OperationKind; import com.oracle.truffle.dsl.processor.java.model.CodeTypeMirror.ArrayCodeTypeMirror; /** * Helper class that initializes a {@link BytecodeDSLModel} with all of the Bytecode DSL builtins. * * The user guide should be updated when new builtin operations are added. 
*/ public class BytecodeDSLBuiltins { private static final String GENERATE_BYTECODE = "com.oracle.truffle.api.bytecode.GenerateBytecode"; public static void addBuiltins(BytecodeDSLModel m, TruffleTypes types, ProcessorContext context) { m.popInstruction = m.instruction(InstructionKind.POP, "pop", m.signature(void.class, Object.class)); m.dupInstruction = m.instruction(InstructionKind.DUP, "dup", m.signature(void.class)); m.returnInstruction = m.instruction(InstructionKind.RETURN, "return", m.signature(void.class, Object.class)); m.branchInstruction = m.instruction(InstructionKind.BRANCH, "branch", m.signature(void.class)) // .addImmediate(ImmediateKind.BYTECODE_INDEX, "branch_target"); m.branchBackwardInstruction = m.instruction(InstructionKind.BRANCH_BACKWARD, "branch.backward", m.signature(void.class)) // .addImmediate(ImmediateKind.BYTECODE_INDEX, "branch_target") // .addImmediate(ImmediateKind.BRANCH_PROFILE, "loop_header_branch_profile"); m.branchFalseInstruction = m.instruction(InstructionKind.BRANCH_FALSE, "branch.false", m.signature(void.class, Object.class)) // .addImmediate(ImmediateKind.BYTECODE_INDEX, "branch_target") // .addImmediate(ImmediateKind.BRANCH_PROFILE, "branch_profile"); m.throwInstruction = m.instruction(InstructionKind.THROW, "throw", m.signature(void.class, Object.class)); m.loadConstantInstruction = m.instruction(InstructionKind.LOAD_CONSTANT, "load.constant", m.signature(Object.class)) // .addImmediate(ImmediateKind.CONSTANT, "constant"); m.loadNullInstruction = m.instruction(InstructionKind.LOAD_NULL, "load.null", m.signature(Object.class)); m.blockOperation = m.operation(OperationKind.BLOCK, "Block", """ Block is a grouping operation that executes each child in its body sequentially, producing the result of the last child (if any). This operation can be used to group multiple operations together in a single operation. The result of a Block is the result produced by the last child (or void, if no value is produced). 
""") // .setTransparent(true) // .setVariadic(true, 0) // .setDynamicOperands(transparentOperationChild()); m.rootOperation = m.operation(OperationKind.ROOT, "Root", rootOperationJavadoc(m)) // .setTransparent(true) // .setVariadic(true, 0) // .setDynamicOperands(transparentOperationChild()); m.ifThenOperation = m.operation(OperationKind.IF_THEN, "IfThen", """ IfThen implements an if-then statement. It evaluates {@code condition}, which must produce a boolean. If the value is {@code true}, it executes {@code thens}. This is a void operation; {@code thens} can also be void. """) // .setVoid(true) // .setDynamicOperands(child("condition"), voidableChild("thens")); m.ifThenElseOperation = m.operation(OperationKind.IF_THEN_ELSE, "IfThenElse", """ IfThenElse implements an if-then-else statement. It evaluates {@code condition}, which must produce a boolean. If the value is {@code true}, it executes {@code thens}; otherwise, it executes {@code elses}. This is a void operation; both {@code thens} and {@code elses} can also be void. """) // .setVoid(true) // .setDynamicOperands(child("condition"), voidableChild("thens"), voidableChild("elses")); m.conditionalOperation = m.operation(OperationKind.CONDITIONAL, "Conditional", """ Conditional implements a conditional expression (e.g., {@code condition ? thens : elses} in Java). It has the same semantics as IfThenElse, except it produces the value of the conditionally-executed child. """) // .setDynamicOperands(child("condition"), child("thens"), child("elses")); m.whileOperation = m.operation(OperationKind.WHILE, "While", """ While implements a while loop. It evaluates {@code condition}, which must produce a boolean. If the value is {@code true}, it executes {@code body} and repeats. This is a void operation; {@code body} can also be void. 
""") // .setVoid(true) // .setDynamicOperands(child("condition"), voidableChild("body")); m.tryCatchOperation = m.operation(OperationKind.TRY_CATCH, "TryCatch", """ TryCatch implements an exception handler. It executes {@code try}, and if a Truffle exception is thrown, it executes {@code catch}. The exception can be accessed within the {@code catch} operation using LoadException. Unlike a Java try-catch, this operation does not filter the exception based on type. This is a void operation; both {@code try} and {@code catch} can also be void. """) // .setVoid(true) // .setDynamicOperands(voidableChild("try"), voidableChild("catch")); TypeMirror finallyGeneratorType = context.getDeclaredType(Runnable.class); m.tryFinallyOperation = m.operation(OperationKind.TRY_FINALLY, "TryFinally", """ TryFinally implements a finally handler. It executes {@code try}, and after execution finishes it always executes {@code finally}. If {@code try} finishes normally, {@code finally} executes and control continues after the TryFinally operation. If {@code try} finishes exceptionally, {@code finally} executes and then rethrows the exception. If {@code try} finishes with a control flow operation, {@code finally} executes and then the control flow operation continues (i.e., a Branch will branch, a Return will return). <p> Unlike other child operations, {@code finally} is emitted multiple times in the bytecode (once for each regular, exceptional, and early control flow exit). To facilitate this, the {@code finally} operation is specified by a {@code finallyGenerator} that can be invoked multiple times. It should be repeatable and not have side effects. <p> This is a void operation; either of {@code try} or {@code finally} can be void. 
""") // .setVoid(true) // .setOperationBeginArguments(new OperationArgument(finallyGeneratorType, Encoding.FINALLY_GENERATOR, "finallyGenerator", "an idempotent Runnable that generates the {@code finally} operation using builder calls") // ).setDynamicOperands(voidableChild("try")); m.tryCatchOtherwiseOperation = m.operation(OperationKind.TRY_CATCH_OTHERWISE, "TryCatchOtherwise", """ TryCatchOtherwise implements a try block with different handling for regular and exceptional behaviour. It executes {@code try} and then one of the handlers. If {@code try} finishes normally, {@code otherwise} executes and control continues after the TryCatchOtherwise operation. If {@code try} finishes exceptionally, {@code catch} executes. The exception can be accessed using LoadException. Control continues after the TryCatchOtherwise operation. If {@code try} finishes with a control flow operation, {@code otherwise} executes and then the control flow operation continues (i.e., a Branch will branch, a Return will return). <p> Unlike other child operations, {@code otherwise} is emitted multiple times in the bytecode (once for each regular and early control flow exit). To facilitate this, the {@code otherwise} operation is specified by an {@code otherwiseGenerator} that can be invoked multiple times. It should be repeatable and not have side effects. <p> This operation is effectively a TryFinally operation with a specialized handler for the exception case. It does <strong>not</strong> implement try-catch-finally semantics: if an exception is thrown {@code catch} executes and {@code otherwise} does not. In pseudocode, it implements: <pre> try { tryOperation } finally { if (exceptionThrown) { catchOperation } else { otherwiseOperation } } </pre> <p> This is a void operation; any of {@code try}, {@code catch}, or {@code otherwise} can be void. 
""") // .setVoid(true) // .setOperationBeginArguments(new OperationArgument(finallyGeneratorType, Encoding.FINALLY_GENERATOR, "otherwiseGenerator", "an idempotent Runnable that generates the {@code otherwise} operation using builder calls") // ).setDynamicOperands(voidableChild("try"), voidableChild("catch")); m.finallyHandlerOperation = m.operation(OperationKind.FINALLY_HANDLER, "FinallyHandler", """ FinallyHandler is an internal operation that has no stack effect. All finally generators execute within a FinallyHandler operation. Executing the generator emits new operations, but these operations should not affect the outer operation's child count/value validation. To accomplish this, FinallyHandler "hides" these operations by popping any produced values and omitting calls to beforeChild/afterChild. When walking the operation stack, we skip over operations above finallyOperationSp since they do not logically enclose the handler. """) // .setVoid(true) // .setVariadic(true, 0) // .setDynamicOperands(transparentOperationChild()) // .setOperationBeginArguments(new OperationArgument(context.getType(short.class), Encoding.SHORT, "finallyOperationSp", "the operation stack pointer for the finally operation that created the FinallyHandler")) // .setInternal(); m.operation(OperationKind.LABEL, "Label", """ Label assigns {@code label} the current location in the bytecode (so that it can be used as the target of a Branch). This is a void operation. <p> Each {@link BytecodeLabel} must be defined exactly once. It should be defined directly inside the same operation in which it is created (using {@link #createLabel}). """) // .setVoid(true) // .setOperationBeginArguments(new OperationArgument(types.BytecodeLabel, Encoding.LABEL, "label", "the label to define")); m.operation(OperationKind.BRANCH, "Branch", """ Branch performs a branch to {@code label}. This operation only supports unconditional forward branches; use IfThen and While to perform other kinds of branches. 
""") // .setVoid(true) // .setOperationBeginArguments(new OperationArgument(types.BytecodeLabel, Encoding.LABEL, "label", "the label to branch to")) // .setInstruction(m.branchInstruction); m.loadConstantOperation = m.operation(OperationKind.LOAD_CONSTANT, "LoadConstant", """ LoadConstant produces {@code constant}. The constant should be immutable, since it may be shared across multiple LoadConstant operations. """) // .setOperationBeginArguments(new OperationArgument(context.getType(Object.class), Encoding.CONSTANT, "constant", "the constant value to load")) // .setInstruction(m.loadConstantInstruction); m.loadNullOperation = m.operation(OperationKind.LOAD_NULL, "LoadNull", """ LoadNull produces a {@code null} value. """) // .setInstruction(m.loadNullInstruction); m.operation(OperationKind.LOAD_ARGUMENT, "LoadArgument", """ LoadArgument reads the argument at {@code index} from the frame. Throws {@link IndexOutOfBoundsException} if the index is out of bounds. """) // .setOperationBeginArguments(new OperationArgument(context.getType(int.class), Encoding.INTEGER, "index", "the index of the argument to load (must fit into a short)")) // .setInstruction(m.instruction(InstructionKind.LOAD_ARGUMENT, "load.argument", m.signature(Object.class))// .addImmediate(ImmediateKind.SHORT, "index")); m.operation(OperationKind.LOAD_EXCEPTION, "LoadException", """ LoadException reads the current exception from the frame. This operation is only permitted inside the {@code catch} operation of TryCatch and TryCatchOtherwise operations. """) // .setInstruction(m.instruction(InstructionKind.LOAD_EXCEPTION, "load.exception", m.signature(Object.class))// .addImmediate(ImmediateKind.STACK_POINTER, "exception_sp")); m.loadLocalOperation = m.operation(OperationKind.LOAD_LOCAL, "LoadLocal", String.format(""" LoadLocal reads {@code local} from the current frame. If a value has not been written to the local, LoadLocal %s. 
""", loadLocalUndefinedBehaviour(m))) // .setOperationBeginArguments(new OperationArgument(types.BytecodeLocal, Encoding.LOCAL, "local", "the local to load")) // .setInstruction(m.instruction(InstructionKind.LOAD_LOCAL, "load.local", m.signature(Object.class)) // .addImmediate(ImmediateKind.FRAME_INDEX, "frame_index")); m.storeLocalInstruction = m.instruction(InstructionKind.STORE_LOCAL, "store.local", m.signature(void.class, Object.class)) // .addImmediate(ImmediateKind.FRAME_INDEX, "frame_index"); m.storeLocalOperation = m.operation(OperationKind.STORE_LOCAL, "StoreLocal", """ StoreLocal writes the value produced by {@code value} into the {@code local} in the current frame. """) // .setVoid(true) // .setOperationBeginArguments(new OperationArgument(types.BytecodeLocal, Encoding.LOCAL, "local", "the local to store to")) // .setDynamicOperands(child("value")) // .setInstruction(m.storeLocalInstruction); if (m.enableMaterializedLocalAccesses) { m.loadLocalMaterializedOperation = m.operation(OperationKind.LOAD_LOCAL_MATERIALIZED, "LoadLocalMaterialized", String.format(""" LoadLocalMaterialized reads {@code local} from the materialized frame produced by {@code frame}. This operation can be used to read a local defined by the current root or an enclosing root. The local must belong to the materialized frame. It should also be in scope, otherwise the operation may produce unexpected values. The interpreter will validate the scope if the interpreter is configured to {@link %s#storeBytecodeIndexInFrame store the bytecode index in the frame}. 
""", GENERATE_BYTECODE)) // .setOperationBeginArguments(new OperationArgument(types.BytecodeLocal, Encoding.LOCAL, "local", "the local to load")) // .setDynamicOperands(child("frame")) // .setInstruction(m.instruction(InstructionKind.LOAD_LOCAL_MATERIALIZED, "load.local.mat", m.signature(Object.class, Object.class)) // .addImmediate(ImmediateKind.FRAME_INDEX, "frame_index") // .addImmediate(ImmediateKind.LOCAL_ROOT, "root_index")); m.storeLocalMaterializedOperation = m.operation(OperationKind.STORE_LOCAL_MATERIALIZED, "StoreLocalMaterialized", String.format(""" StoreLocalMaterialized writes the value produced by {@code value} into {@code local} in the materialized frame produced by {@code frame}. This operation can be used to store locals defined by the current root or an enclosing root. The local must belong to the materialized frame. It should also be in scope, otherwise the operation may produce unexpected values. The interpreter will validate the scope if the interpreter is configured to {@link %s#storeBytecodeIndexInFrame store the bytecode index in the frame}. 
""", GENERATE_BYTECODE)) // .setVoid(true) // .setOperationBeginArguments(new OperationArgument(types.BytecodeLocal, Encoding.LOCAL, "local", "the local to store to")) // .setDynamicOperands(child("frame"), child("value")) // .setInstruction(m.instruction(InstructionKind.STORE_LOCAL_MATERIALIZED, "store.local.mat", m.signature(void.class, Object.class, Object.class)) // .addImmediate(ImmediateKind.FRAME_INDEX, "frame_index") // .addImmediate(ImmediateKind.LOCAL_ROOT, "root_index")); } m.returnOperation = m.operation(OperationKind.RETURN, "Return", "Return returns the value produced by {@code result}.") // .setVoid(true) // .setDynamicOperands(child("result")) // .setInstruction(m.returnInstruction); if (m.enableYield) { m.yieldInstruction = m.instruction(InstructionKind.YIELD, "yield", m.signature(Object.class, Object.class)).addImmediate(ImmediateKind.CONSTANT, "location"); m.operation(OperationKind.YIELD, "Yield", """ Yield executes {@code value} and suspends execution at the given location, returning a {@link com.oracle.truffle.api.bytecode.ContinuationResult} containing the result. The caller can resume the continuation, which continues execution after the Yield. When resuming, the caller passes a value that becomes the value produced by the Yield. """) // .setDynamicOperands(child("value")).setInstruction(m.yieldInstruction); } m.sourceOperation = m.operation(OperationKind.SOURCE, "Source", """ Source associates the children in its {@code body} with {@code source}. Together with SourceSection, it encodes source locations for operations in the program. 
""") // .setTransparent(true) // .setVariadic(true, 0) // .setOperationBeginArguments(new OperationArgument(types.Source, Encoding.CONSTANT, "source", "the source object to associate with the enclosed operations")) // .setDynamicOperands(transparentOperationChild()); String sourceDoc = """ SourceSection associates the children in its {@code body} with the source section with the given character {@code index} and {@code length}. To specify an {@link Source#createUnavailableSection() unavailable source section}, provide {@code -1} for both arguments. This operation must be (directly or indirectly) enclosed within a Source operation. """; m.sourceSectionPrefixOperation = m.operation(OperationKind.SOURCE_SECTION, "SourceSectionPrefix", sourceDoc, "SourceSection") // .setTransparent(true) // .setVariadic(true, 0) // .setOperationBeginArguments( new OperationArgument(context.getType(int.class), Encoding.INTEGER, "index", "the starting character index of the source section, or -1 if the section is unavailable"), new OperationArgument(context.getType(int.class), Encoding.INTEGER, "length", "the length (in characters) of the source section, or -1 if the section is unavailable")) // .setDynamicOperands(transparentOperationChild()); m.sourceSectionSuffixOperation = m.operation(OperationKind.SOURCE_SECTION, "SourceSectionSuffix", sourceDoc, "SourceSection") // .setTransparent(true) // .setVariadic(true, 0) // .setOperationEndArguments( new OperationArgument(context.getType(int.class), Encoding.INTEGER, "index", "the starting character index of the source section, or -1 if the section is unavailable"), new OperationArgument(context.getType(int.class), Encoding.INTEGER, "length", "the length (in characters) of the source section, or -1 if the section is unavailable")) // .setDynamicOperands(transparentOperationChild()); if (m.enableTagInstrumentation) { m.tagEnterInstruction = m.instruction(InstructionKind.TAG_ENTER, "tag.enter", m.signature(void.class)); 
// Each tag.* instrumentation instruction carries a TAG_NODE immediate referencing its TagNode.
m.tagEnterInstruction.addImmediate(ImmediateKind.TAG_NODE, "tag"); m.tagLeaveValueInstruction = m.instruction(InstructionKind.TAG_LEAVE, "tag.leave", m.signature(Object.class, Object.class)); m.tagLeaveValueInstruction.addImmediate(ImmediateKind.TAG_NODE, "tag"); m.tagLeaveVoidInstruction = m.instruction(InstructionKind.TAG_LEAVE_VOID, "tag.leaveVoid", m.signature(Object.class)); m.tagLeaveVoidInstruction.addImmediate(ImmediateKind.TAG_NODE, "tag"); m.tagOperation = m.operation(OperationKind.TAG, "Tag", """ Tag associates {@code tagged} with the given tags. When the {@link BytecodeConfig} includes one or more of the given tags, the interpreter will automatically invoke instrumentation probes when entering/leaving {@code tagged}. """) // .setTransparent(true) // .setOperationBeginArgumentVarArgs(true) // .setOperationBeginArguments( new OperationArgument(new ArrayCodeTypeMirror(context.getDeclaredType(Class.class)), Encoding.TAGS, "newTags", "the tags to associate with the enclosed operations"))// .setDynamicOperands(voidableChild("tagged")) // .setOperationEndArguments( new OperationArgument(new ArrayCodeTypeMirror(context.getDeclaredType(Class.class)), Encoding.TAGS, "newTags", "the tags to associate with the enclosed operations"))// .setInstruction(m.tagLeaveValueInstruction); } m.clearLocalInstruction = m.instruction(InstructionKind.CLEAR_LOCAL, "clear.local", m.signature(void.class)); m.clearLocalInstruction.addImmediate(ImmediateKind.FRAME_INDEX, "frame_index"); m.sortInstructionsByKind(); } /* * Invoked when instructions are being finalized. Allows to conditionally add builtin * instructions depending on the almost final model. 
 * For example, variadic instructions are only emitted when the model declares custom variadic
 * operations, and the invalidate instructions are sized from the maximum length of all other
 * instructions (see below).
*/ public static void addBuiltinsOnFinalize(BytecodeDSLModel m) { if (m.hasCustomVariadic) { m.loadVariadicInstruction = m.instruction(InstructionKind.LOAD_VARIADIC, "load.variadic", m.signature(void.class, Object.class)); m.createVariadicInstruction = m.instruction(InstructionKind.CREATE_VARIADIC, "create.variadic", m.signature(Object.class, Object.class)); m.emptyVariadicInstruction = m.instruction(InstructionKind.EMPTY_VARIADIC, "empty.variadic", m.signature(Object.class)); m.loadVariadicInstruction.addImmediate(ImmediateKind.INTEGER, "offset"); m.loadVariadicInstruction.addImmediate(ImmediateKind.SHORT, "count"); if (m.maximumVariadicOffset > 0) { m.createVariadicInstruction.addImmediate(ImmediateKind.INTEGER, "offset"); } m.createVariadicInstruction.addImmediate(ImmediateKind.INTEGER, "count"); if (m.hasVariadicReturn) { m.splatVariadicInstruction = m.instruction(InstructionKind.SPLAT_VARIADIC, "splat.variadic", m.signature(Object.class, Object.class)); m.splatVariadicInstruction.addImmediate(ImmediateKind.INTEGER, "offset"); m.splatVariadicInstruction.addImmediate(ImmediateKind.INTEGER, "count"); m.loadVariadicInstruction.addImmediate(ImmediateKind.SHORT, "merge_count"); m.createVariadicInstruction.addImmediate(ImmediateKind.SHORT, "merge_count"); } } if (m.enableTagInstrumentation && m.hasYieldOperation()) { m.tagYieldInstruction = m.instruction(InstructionKind.TAG_YIELD, "tag.yield", m.signature(Object.class, Object.class)); m.tagYieldInstruction.addImmediate(ImmediateKind.TAG_NODE, "tag"); for (OperationModel yieldOperation : m.getCustomYieldOperations()) { if (yieldOperation.instruction.signature.dynamicOperandCount == 0) { m.tagYieldNullInstruction = m.instruction(InstructionKind.TAG_YIELD_NULL, "tag.yieldNull", m.signature(void.class)); m.tagYieldNullInstruction.addImmediate(ImmediateKind.TAG_NODE, "tag"); break; } } m.tagResumeInstruction = m.instruction(InstructionKind.TAG_RESUME, "tag.resume", m.signature(void.class)); 
// tag.resume also references its TagNode via a TAG_NODE immediate.
m.tagResumeInstruction.addImmediate(ImmediateKind.TAG_NODE, "tag"); } // invalidate instructions should be the last instructions to add as they depend on the // length of all other instructions if (m.isBytecodeUpdatable()) { int maxLength = OPCODE_WIDTH; for (InstructionModel instruction : m.getInstructions()) { maxLength = Math.max(maxLength, instruction.getInstructionLength()); } // Allocate instructions with [0, 1, ..., maxLength - OPCODE_WIDTH] short immediates. int numShortImmediates = (maxLength - OPCODE_WIDTH) / 2; m.invalidateInstructions = new InstructionModel[numShortImmediates + 1]; for (int i = 0; i < numShortImmediates + 1; i++) { InstructionModel model = m.instruction(InstructionKind.INVALIDATE, "invalidate" + i, m.signature(void.class)); for (int j = 0; j < i; j++) { model.addImmediate(ImmediateKind.SHORT, "invalidated" + j); } m.invalidateInstructions[i] = model; } } } private static String rootOperationJavadoc(BytecodeDSLModel m) { String rootClass = m.templateType.getSimpleName().toString(); String innerRootBehaviour; if (m.enableMaterializedLocalAccesses) { innerRootBehaviour = "but the inner root <i>can</i> manipulate the outer root's locals\n" + "using materialized local accesses if the outer frame is provided to it"; } else { innerRootBehaviour = String.format("and it does not have access to the outer root's locals (if it needs\n" + "access to outer locals, consider {@link %s#enableMaterializedLocalAccesses enabling materialized local accesses})", GENERATE_BYTECODE); } return String.format( """ Each Root operation defines one function (i.e., a {@link %s}). It takes one or more children, which define the body of the function that executes when it is invoked. If control falls through to the end of the body without returning, instructions are inserted to implicitly return {@code null}. <p> A root operation is typically the outermost one. 
That is, a {@link BytecodeParser} should invoke {@link #beginRoot} first before using other builder methods to generate bytecode. The parser should invoke {@link #endRoot} to finish generating the {@link %s}. <p> A parser *can* nest this operation in Source and SourceSection operations in order to provide a {@link Node#getSourceSection source location} for the entire root node. The result of {@link Node#getSourceSection} on the generated root is undefined if there is no enclosing SourceSection operation. <p> This method can also be called inside of another root operation. Bytecode generation for the outer root node suspends until generation for the inner root node finishes. The inner root node is not lexically nested in the outer (you can invoke the inner root node independently), %s. Multiple root nodes can be obtained from the {@link BytecodeNodes} object in the order of their {@link #beginRoot} calls. """, rootClass, rootClass, innerRootBehaviour); } private static String loadLocalUndefinedBehaviour(BytecodeDSLModel m) { if (m.defaultLocalValue == null || m.defaultLocalValue.isEmpty()) { return "throws a {@link com.oracle.truffle.api.frame.FrameSlotTypeException}"; } else { return String.format("produces the default local value (%s)", m.defaultLocalValue); } } private static DynamicOperandModel child(String name) { return new DynamicOperandModel(List.of(name), false, false); } private static DynamicOperandModel voidableChild(String name) { return new DynamicOperandModel(List.of(name), true, false); } private static DynamicOperandModel transparentOperationChild() { return new DynamicOperandModel(List.of("body"), true, true); } }
apache/hbase
36,883
hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.io.hfile; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.io.ByteArrayOutputStream; import java.io.DataOutput; import java.io.DataOutputStream; import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Random; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseCommonTestingUtil; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import 
org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.io.ByteBuffAllocator; import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper; import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.compress.Compression.Algorithm; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.io.encoding.IndexBlockEncoding; import org.apache.hadoop.hbase.io.hfile.HFile.Writer; import org.apache.hadoop.hbase.io.hfile.HFileBlockIndex.BlockIndexReader; import org.apache.hadoop.hbase.io.hfile.NoOpIndexBlockEncoder.NoOpEncodedSeeker; import org.apache.hadoop.hbase.nio.ByteBuff; import org.apache.hadoop.hbase.nio.MultiByteBuff; import org.apache.hadoop.hbase.nio.RefCnt; import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClassSize; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.junit.Before; import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hbase.thirdparty.io.netty.util.ResourceLeakDetector; @RunWith(Parameterized.class) @Category({ IOTests.class, MediumTests.class }) public class TestHFileBlockIndex { @ClassRule public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestHFileBlockIndex.class); @Parameters public static Collection<Object[]> compressionAlgorithms() { return HBaseCommonTestingUtil.COMPRESSION_ALGORITHMS_PARAMETERIZED; } public TestHFileBlockIndex(Compression.Algorithm compr) { this.compr = compr; } private static final Logger LOG = 
LoggerFactory.getLogger(TestHFileBlockIndex.class); private static final Random RNG = new Random(); // This test depends on Random#setSeed private static final int NUM_DATA_BLOCKS = 1000; private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); private static final int SMALL_BLOCK_SIZE = 4096; private static final int NUM_KV = 10000; private static FileSystem fs; private Path path; private long rootIndexOffset; private int numRootEntries; private int numLevels; private static final List<byte[]> keys = new ArrayList<>(); private final Compression.Algorithm compr; private byte[] firstKeyInFile; private Configuration conf; private static final int[] INDEX_CHUNK_SIZES = { 4096, 512, 384 }; private static final int[] EXPECTED_NUM_LEVELS = { 2, 3, 4 }; private static final int[] UNCOMPRESSED_INDEX_SIZES = { 19187, 21813, 23086 }; private static final boolean includesMemstoreTS = true; static { assert INDEX_CHUNK_SIZES.length == EXPECTED_NUM_LEVELS.length; assert INDEX_CHUNK_SIZES.length == UNCOMPRESSED_INDEX_SIZES.length; } @Before public void setUp() throws IOException { keys.clear(); firstKeyInFile = null; conf = TEST_UTIL.getConfiguration(); RNG.setSeed(2389757); // This test requires at least HFile format version 2. 
conf.setInt(HFile.FORMAT_VERSION_KEY, HFile.MAX_FORMAT_VERSION); fs = HFileSystem.get(conf); } @Test public void testBlockIndex() throws IOException { testBlockIndexInternals(false); clear(); testBlockIndexInternals(true); } private void writeDataBlocksAndCreateIndex(HFileBlock.Writer hbw, FSDataOutputStream outputStream, HFileBlockIndex.BlockIndexWriter biw) throws IOException { for (int i = 0; i < NUM_DATA_BLOCKS; ++i) { hbw.startWriting(BlockType.DATA).write(Bytes.toBytes(String.valueOf(RNG.nextInt(1000)))); long blockOffset = outputStream.getPos(); hbw.writeHeaderAndData(outputStream); byte[] firstKey = null; byte[] family = Bytes.toBytes("f"); byte[] qualifier = Bytes.toBytes("q"); for (int j = 0; j < 16; ++j) { byte[] k = new KeyValue(RandomKeyValueUtil.randomOrderedKey(RNG, i * 16 + j), family, qualifier, EnvironmentEdgeManager.currentTime(), KeyValue.Type.Put).getKey(); keys.add(k); if (j == 8) { firstKey = k; } } assertTrue(firstKey != null); if (firstKeyInFile == null) { firstKeyInFile = firstKey; } biw.addEntry(firstKey, blockOffset, hbw.getOnDiskSizeWithHeader()); writeInlineBlocks(hbw, outputStream, biw, false); } writeInlineBlocks(hbw, outputStream, biw, true); rootIndexOffset = biw.writeIndexBlocks(outputStream); outputStream.close(); } @Test public void testBlockIndexWithOffHeapBuffer() throws Exception { ResourceLeakDetector.setLevel(ResourceLeakDetector.Level.PARANOID); path = new Path(TEST_UTIL.getDataTestDir(), "block_index_testBlockIndexWithOffHeapBuffer"); assertEquals(0, keys.size()); HFileContext meta = new HFileContextBuilder().withHBaseCheckSum(true) .withIncludesMvcc(includesMemstoreTS).withIncludesTags(true).withCompression(compr) .withBytesPerCheckSum(HFile.DEFAULT_BYTES_PER_CHECKSUM).build(); ByteBuffAllocator allocator = ByteBuffAllocator.create(TEST_UTIL.getConfiguration(), true); HFileBlock.Writer hbw = new HFileBlock.Writer(TEST_UTIL.getConfiguration(), null, meta, allocator, meta.getBlocksize()); FSDataOutputStream outputStream = 
fs.create(path); final AtomicInteger counter = new AtomicInteger(); RefCnt.detector.setLeakListener(new ResourceLeakDetector.LeakListener() { @Override public void onLeak(String s, String s1) { counter.incrementAndGet(); } }); long maxSize = NUM_DATA_BLOCKS * 1000; long blockSize = 1000; LruBlockCache cache = new LruBlockCache(maxSize, blockSize); CacheConfig cacheConfig = new CacheConfig(TEST_UTIL.getConfiguration(), null, cache, allocator); HFileBlockIndex.BlockIndexWriter biw = new HFileBlockIndex.BlockIndexWriter(hbw, cacheConfig, path.getName(), null); writeDataBlocksAndCreateIndex(hbw, outputStream, biw); System.gc(); Thread.sleep(1000); allocator.allocate(128 * 1024).release(); assertEquals(0, counter.get()); } private void clear() throws IOException { keys.clear(); firstKeyInFile = null; conf = TEST_UTIL.getConfiguration(); RNG.setSeed(2389757); // This test requires at least HFile format version 2. conf.setInt(HFile.FORMAT_VERSION_KEY, 3); fs = HFileSystem.get(conf); } private void testBlockIndexInternals(boolean useTags) throws IOException { path = new Path(TEST_UTIL.getDataTestDir(), "block_index_" + compr + useTags); writeWholeIndex(useTags); readIndex(useTags); } /** * A wrapper around a block reader which only caches the results of the last operation. Not * thread-safe. 
*/ private static class BlockReaderWrapper implements HFile.CachingBlockReader { private HFileBlock.FSReader realReader; private long prevOffset; private long prevOnDiskSize; private boolean prevPread; private HFileBlock prevBlock; public int hitCount = 0; public int missCount = 0; public BlockReaderWrapper(HFileBlock.FSReader realReader) { this.realReader = realReader; } @Override public HFileBlock readBlock(long offset, long onDiskSize, boolean cacheBlock, boolean pread, boolean isCompaction, boolean updateCacheMetrics, BlockType expectedBlockType, DataBlockEncoding expectedDataBlockEncoding) throws IOException { return readBlock(offset, onDiskSize, cacheBlock, pread, isCompaction, updateCacheMetrics, expectedBlockType, expectedDataBlockEncoding, false); } @Override public HFileBlock readBlock(long offset, long onDiskSize, boolean cacheBlock, boolean pread, boolean isCompaction, boolean updateCacheMetrics, BlockType expectedBlockType, DataBlockEncoding expectedDataBlockEncoding, boolean cacheOnly) throws IOException { if (offset == prevOffset && onDiskSize == prevOnDiskSize && pread == prevPread) { hitCount += 1; return prevBlock; } missCount += 1; prevBlock = realReader.readBlockData(offset, onDiskSize, pread, false, true); prevOffset = offset; prevOnDiskSize = onDiskSize; prevPread = pread; return prevBlock; } } private void readIndex(boolean useTags) throws IOException { long fileSize = fs.getFileStatus(path).getLen(); LOG.info("Size of {}: {} compression={}", path, fileSize, compr.toString()); FSDataInputStream istream = fs.open(path); HFileContext meta = new HFileContextBuilder().withHBaseCheckSum(true).withIncludesMvcc(includesMemstoreTS) .withIncludesTags(useTags).withCompression(compr).build(); ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, path).build(); HFileBlock.FSReader blockReader = new HFileBlock.FSReaderImpl(context, meta, ByteBuffAllocator.HEAP, conf); BlockReaderWrapper brw = new BlockReaderWrapper(blockReader); 
HFileBlockIndex.BlockIndexReader indexReader = new HFileBlockIndex.CellBasedKeyBlockIndexReader(CellComparatorImpl.COMPARATOR, numLevels); indexReader.readRootIndex(blockReader.blockRange(rootIndexOffset, fileSize) .nextBlockWithBlockType(BlockType.ROOT_INDEX), numRootEntries); long prevOffset = -1; int i = 0; int expectedHitCount = 0; int expectedMissCount = 0; LOG.info("Total number of keys: " + keys.size()); for (byte[] key : keys) { assertTrue(key != null); assertTrue(indexReader != null); KeyValue.KeyOnlyKeyValue keyOnlyKey = new KeyValue.KeyOnlyKeyValue(key, 0, key.length); HFileBlock b = indexReader.seekToDataBlock(keyOnlyKey, null, true, true, false, null, brw); if ( PrivateCellUtil.compare(CellComparatorImpl.COMPARATOR, keyOnlyKey, firstKeyInFile, 0, firstKeyInFile.length) < 0 ) { assertTrue(b == null); ++i; continue; } String keyStr = "key #" + i + ", " + Bytes.toStringBinary(key); assertTrue("seekToDataBlock failed for " + keyStr, b != null); if (prevOffset == b.getOffset()) { assertEquals(++expectedHitCount, brw.hitCount); } else { LOG.info("First key in a new block: " + keyStr + ", block offset: " + b.getOffset() + ")"); assertTrue(b.getOffset() > prevOffset); assertEquals(++expectedMissCount, brw.missCount); prevOffset = b.getOffset(); } ++i; } istream.close(); } private void writeWholeIndex(boolean useTags) throws IOException { assertEquals(0, keys.size()); HFileContext meta = new HFileContextBuilder().withHBaseCheckSum(true) .withIncludesMvcc(includesMemstoreTS).withIncludesTags(useTags).withCompression(compr) .withBytesPerCheckSum(HFile.DEFAULT_BYTES_PER_CHECKSUM).build(); HFileBlock.Writer hbw = new HFileBlock.Writer(TEST_UTIL.getConfiguration(), null, meta); FSDataOutputStream outputStream = fs.create(path); HFileBlockIndex.BlockIndexWriter biw = new HFileBlockIndex.BlockIndexWriter(hbw, null, null, null); writeDataBlocksAndCreateIndex(hbw, outputStream, biw); numLevels = biw.getNumLevels(); numRootEntries = biw.getNumRootEntries(); 
LOG.info("Index written: numLevels=" + numLevels + ", numRootEntries=" + numRootEntries + ", rootIndexOffset=" + rootIndexOffset); } private void writeInlineBlocks(HFileBlock.Writer hbw, FSDataOutputStream outputStream, HFileBlockIndex.BlockIndexWriter biw, boolean isClosing) throws IOException { while (biw.shouldWriteBlock(isClosing)) { long offset = outputStream.getPos(); biw.writeInlineBlock(hbw.startWriting(biw.getInlineBlockType())); hbw.writeHeaderAndData(outputStream); biw.blockWritten(offset, hbw.getOnDiskSizeWithHeader(), hbw.getUncompressedSizeWithoutHeader()); LOG.info( "Wrote an inline index block at " + offset + ", size " + hbw.getOnDiskSizeWithHeader()); } } private static final long getDummyFileOffset(int i) { return i * 185 + 379; } private static final int getDummyOnDiskSize(int i) { return i * i * 37 + i * 19 + 13; } @Test public void testSecondaryIndexBinarySearch() throws IOException { int numTotalKeys = 99; assertTrue(numTotalKeys % 2 == 1); // Ensure no one made this even. // We only add odd-index keys into the array that we will binary-search. int numSearchedKeys = (numTotalKeys - 1) / 2; ByteArrayOutputStream baos = new ByteArrayOutputStream(); DataOutputStream dos = new DataOutputStream(baos); dos.writeInt(numSearchedKeys); int curAllEntriesSize = 0; int numEntriesAdded = 0; // Only odd-index elements of this array are used to keep the secondary // index entries of the corresponding keys. 
int secondaryIndexEntries[] = new int[numTotalKeys]; for (int i = 0; i < numTotalKeys; ++i) { byte[] k = RandomKeyValueUtil.randomOrderedKey(RNG, i * 2); KeyValue cell = new KeyValue(k, Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("val")); // KeyValue cell = new KeyValue.KeyOnlyKeyValue(k, 0, k.length); keys.add(cell.getKey()); String msgPrefix = "Key #" + i + " (" + Bytes.toStringBinary(k) + "): "; StringBuilder padding = new StringBuilder(); while (msgPrefix.length() + padding.length() < 70) padding.append(' '); msgPrefix += padding; if (i % 2 == 1) { dos.writeInt(curAllEntriesSize); secondaryIndexEntries[i] = curAllEntriesSize; LOG.info( msgPrefix + "secondary index entry #" + ((i - 1) / 2) + ", offset " + curAllEntriesSize); curAllEntriesSize += cell.getKey().length + HFileBlockIndex.SECONDARY_INDEX_ENTRY_OVERHEAD; ++numEntriesAdded; } else { secondaryIndexEntries[i] = -1; LOG.info(msgPrefix + "not in the searched array"); } } // Make sure the keys are increasing. for (int i = 0; i < keys.size() - 1; ++i) assertTrue(CellComparatorImpl.COMPARATOR.compare( new KeyValue.KeyOnlyKeyValue(keys.get(i), 0, keys.get(i).length), new KeyValue.KeyOnlyKeyValue(keys.get(i + 1), 0, keys.get(i + 1).length)) < 0); dos.writeInt(curAllEntriesSize); assertEquals(numSearchedKeys, numEntriesAdded); int secondaryIndexOffset = dos.size(); assertEquals(Bytes.SIZEOF_INT * (numSearchedKeys + 2), secondaryIndexOffset); for (int i = 1; i <= numTotalKeys - 1; i += 2) { assertEquals(dos.size(), secondaryIndexOffset + secondaryIndexEntries[i]); long dummyFileOffset = getDummyFileOffset(i); int dummyOnDiskSize = getDummyOnDiskSize(i); LOG.debug("Storing file offset=" + dummyFileOffset + " and onDiskSize=" + dummyOnDiskSize + " at offset " + dos.size()); dos.writeLong(dummyFileOffset); dos.writeInt(dummyOnDiskSize); LOG.debug("Stored key " + ((i - 1) / 2) + " at offset " + dos.size()); dos.write(keys.get(i)); } dos.writeInt(curAllEntriesSize); ByteBuffer nonRootIndex = 
ByteBuffer.wrap(baos.toByteArray()); for (int i = 0; i < numTotalKeys; ++i) { byte[] searchKey = keys.get(i); byte[] arrayHoldingKey = new byte[searchKey.length + searchKey.length / 2]; // To make things a bit more interesting, store the key we are looking // for at a non-zero offset in a new array. System.arraycopy(searchKey, 0, arrayHoldingKey, searchKey.length / 2, searchKey.length); KeyValue.KeyOnlyKeyValue cell = new KeyValue.KeyOnlyKeyValue(arrayHoldingKey, searchKey.length / 2, searchKey.length); int searchResult = BlockIndexReader.binarySearchNonRootIndex(cell, new MultiByteBuff(nonRootIndex), CellComparatorImpl.COMPARATOR); String lookupFailureMsg = "Failed to look up key #" + i + " (" + Bytes.toStringBinary(searchKey) + ")"; int expectedResult; int referenceItem; if (i % 2 == 1) { // This key is in the array we search as the element (i - 1) / 2. Make // sure we find it. expectedResult = (i - 1) / 2; referenceItem = i; } else { // This key is not in the array but between two elements on the array, // in the beginning, or in the end. The result should be the previous // key in the searched array, or -1 for i = 0. 
expectedResult = i / 2 - 1; referenceItem = i - 1; } assertEquals(lookupFailureMsg, expectedResult, searchResult); // Now test we can get the offset and the on-disk-size using a // higher-level API function.s boolean locateBlockResult = (BlockIndexReader.locateNonRootIndexEntry(new MultiByteBuff(nonRootIndex), cell, CellComparatorImpl.COMPARATOR) != -1); if (i == 0) { assertFalse(locateBlockResult); } else { assertTrue(locateBlockResult); String errorMsg = "i=" + i + ", position=" + nonRootIndex.position(); assertEquals(errorMsg, getDummyFileOffset(referenceItem), nonRootIndex.getLong()); assertEquals(errorMsg, getDummyOnDiskSize(referenceItem), nonRootIndex.getInt()); } } } @Test public void testBlockIndexChunk() throws IOException { BlockIndexChunk c = new HFileBlockIndex.BlockIndexChunkImpl(); HFileIndexBlockEncoder indexBlockEncoder = NoOpIndexBlockEncoder.INSTANCE; ByteArrayOutputStream baos = new ByteArrayOutputStream(); int N = 1000; int[] numSubEntriesAt = new int[N]; int numSubEntries = 0; for (int i = 0; i < N; ++i) { baos.reset(); DataOutputStream dos = new DataOutputStream(baos); indexBlockEncoder.encode(c, false, dos); assertEquals(c.getNonRootSize(), dos.size()); baos.reset(); dos = new DataOutputStream(baos); indexBlockEncoder.encode(c, true, dos); assertEquals(c.getRootSize(), dos.size()); byte[] k = RandomKeyValueUtil.randomOrderedKey(RNG, i); numSubEntries += RNG.nextInt(5) + 1; keys.add(k); c.add(k, getDummyFileOffset(i), getDummyOnDiskSize(i), numSubEntries); } // Test the ability to look up the entry that contains a particular // deeper-level index block's entry ("sub-entry"), assuming a global // 0-based ordering of sub-entries. This is needed for mid-key calculation. for (int i = 0; i < N; ++i) { for (int j = i == 0 ? 
0 : numSubEntriesAt[i - 1]; j < numSubEntriesAt[i]; ++j) { assertEquals(i, c.getEntryBySubEntry(j)); } } } /** Checks if the HeapSize calculator is within reason */ @Test public void testHeapSizeForBlockIndex() throws IOException { Class<HFileBlockIndex.BlockIndexReader> cl = HFileBlockIndex.BlockIndexReader.class; long expected = ClassSize.estimateBase(cl, false); HFileBlockIndex.BlockIndexReader bi = new HFileBlockIndex.ByteArrayKeyBlockIndexReader(1); long actual = bi.heapSize(); // Since the arrays in BlockIndex(byte [][] blockKeys, long [] blockOffsets, // int [] blockDataSizes) are all null they are not going to show up in the // HeapSize calculation, so need to remove those array costs from expected. // Already the block keys are not there in this case expected -= ClassSize.align(2 * ClassSize.ARRAY); if (expected != actual) { expected = ClassSize.estimateBase(cl, true); assertEquals(expected, actual); } } /** * to check if looks good when midKey on a leaf index block boundary */ @Test public void testMidKeyOnLeafIndexBlockBoundary() throws IOException { Path hfilePath = new Path(TEST_UTIL.getDataTestDir(), "hfile_for_midkey"); int maxChunkSize = 512; conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, maxChunkSize); // should open hfile.block.index.cacheonwrite conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, true); CacheConfig cacheConf = new CacheConfig(conf, BlockCacheFactory.createBlockCache(conf)); BlockCache blockCache = cacheConf.getBlockCache().get(); // Evict all blocks that were cached-on-write by the previous invocation. 
blockCache.evictBlocksByHfileName(hfilePath.getName()); // Write the HFile HFileContext meta = new HFileContextBuilder().withBlockSize(SMALL_BLOCK_SIZE) .withCompression(Algorithm.NONE).withDataBlockEncoding(DataBlockEncoding.NONE).build(); HFile.Writer writer = HFile.getWriterFactory(conf, cacheConf).withPath(fs, hfilePath) .withFileContext(meta).create(); Random rand = new Random(19231737); byte[] family = Bytes.toBytes("f"); byte[] qualifier = Bytes.toBytes("q"); int kvNumberToBeWritten = 16; // the new generated hfile will contain 2 leaf-index blocks and 16 data blocks, // midkey is just on the boundary of the first leaf-index block for (int i = 0; i < kvNumberToBeWritten; ++i) { byte[] row = RandomKeyValueUtil.randomOrderedFixedLengthKey(rand, i, 30); // Key will be interpreted by KeyValue.KEY_COMPARATOR KeyValue kv = new KeyValue(row, family, qualifier, EnvironmentEdgeManager.currentTime(), RandomKeyValueUtil.randomFixedLengthValue(rand, SMALL_BLOCK_SIZE)); writer.append(kv); } writer.close(); // close hfile.block.index.cacheonwrite conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, false); // Read the HFile HFile.Reader reader = HFile.createReader(fs, hfilePath, cacheConf, true, conf); boolean hasArrayIndexOutOfBoundsException = false; try { // get the mid-key. reader.midKey(); } catch (ArrayIndexOutOfBoundsException e) { hasArrayIndexOutOfBoundsException = true; } finally { reader.close(); } // to check if ArrayIndexOutOfBoundsException occurred assertFalse(hasArrayIndexOutOfBoundsException); } /** * Testing block index through the HFile writer/reader APIs. Allows to test setting index block * size through configuration, intermediate-level index blocks, and caching index blocks on write. 
*/ @Test public void testHFileWriterAndReader() throws IOException { Path hfilePath = new Path(TEST_UTIL.getDataTestDir(), "hfile_for_block_index"); CacheConfig cacheConf = new CacheConfig(conf, BlockCacheFactory.createBlockCache(conf)); BlockCache blockCache = cacheConf.getBlockCache().get(); for (int testI = 0; testI < INDEX_CHUNK_SIZES.length; ++testI) { int indexBlockSize = INDEX_CHUNK_SIZES[testI]; int expectedNumLevels = EXPECTED_NUM_LEVELS[testI]; LOG.info("Index block size: " + indexBlockSize + ", compression: " + compr); // Evict all blocks that were cached-on-write by the previous invocation. blockCache.evictBlocksByHfileName(hfilePath.getName()); conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, indexBlockSize); Set<String> keyStrSet = new HashSet<>(); byte[][] keys = new byte[NUM_KV][]; byte[][] values = new byte[NUM_KV][]; // Write the HFile { HFileContext meta = new HFileContextBuilder().withBlockSize(SMALL_BLOCK_SIZE).withCompression(compr).build(); HFile.Writer writer = HFile.getWriterFactory(conf, cacheConf).withPath(fs, hfilePath) .withFileContext(meta).create(); Random rand = new Random(19231737); byte[] family = Bytes.toBytes("f"); byte[] qualifier = Bytes.toBytes("q"); for (int i = 0; i < NUM_KV; ++i) { byte[] row = RandomKeyValueUtil.randomOrderedKey(rand, i); // Key will be interpreted by KeyValue.KEY_COMPARATOR KeyValue kv = new KeyValue(row, family, qualifier, EnvironmentEdgeManager.currentTime(), RandomKeyValueUtil.randomValue(rand)); byte[] k = kv.getKey(); writer.append(kv); keys[i] = k; values[i] = CellUtil.cloneValue(kv); keyStrSet.add(Bytes.toStringBinary(k)); if (i > 0) { assertTrue((PrivateCellUtil.compare(CellComparatorImpl.COMPARATOR, kv, keys[i - 1], 0, keys[i - 1].length)) > 0); } } writer.close(); } // Read the HFile HFile.Reader reader = HFile.createReader(fs, hfilePath, cacheConf, true, conf); assertEquals(expectedNumLevels, reader.getTrailer().getNumDataIndexLevels()); assertTrue(Bytes.equals(keys[0], ((KeyValue) 
reader.getFirstKey().get()).getKey())); assertTrue(Bytes.equals(keys[NUM_KV - 1], ((KeyValue) reader.getLastKey().get()).getKey())); LOG.info("Last key: " + Bytes.toStringBinary(keys[NUM_KV - 1])); for (boolean pread : new boolean[] { false, true }) { HFileScanner scanner = reader.getScanner(conf, true, pread); for (int i = 0; i < NUM_KV; ++i) { checkSeekTo(keys, scanner, i); checkKeyValue("i=" + i, keys[i], values[i], ByteBuffer.wrap(((KeyValue) scanner.getKey()).getKey()), scanner.getValue()); } assertTrue(scanner.seekTo()); for (int i = NUM_KV - 1; i >= 0; --i) { checkSeekTo(keys, scanner, i); checkKeyValue("i=" + i, keys[i], values[i], ByteBuffer.wrap(((KeyValue) scanner.getKey()).getKey()), scanner.getValue()); } } // Manually compute the mid-key and validate it. HFile.Reader reader2 = reader; HFileBlock.FSReader fsReader = reader2.getUncachedBlockReader(); HFileBlock.BlockIterator iter = fsReader.blockRange(0, reader.getTrailer().getLoadOnOpenDataOffset()); HFileBlock block; List<byte[]> blockKeys = new ArrayList<>(); while ((block = iter.nextBlock()) != null) { if (block.getBlockType() != BlockType.LEAF_INDEX) return; ByteBuff b = block.getBufferReadOnly(); int n = b.getIntAfterPosition(0); // One int for the number of items, and n + 1 for the secondary index. int entriesOffset = Bytes.SIZEOF_INT * (n + 2); // Get all the keys from the leaf index block. S for (int i = 0; i < n; ++i) { int keyRelOffset = b.getIntAfterPosition(Bytes.SIZEOF_INT * (i + 1)); int nextKeyRelOffset = b.getIntAfterPosition(Bytes.SIZEOF_INT * (i + 2)); int keyLen = nextKeyRelOffset - keyRelOffset; int keyOffset = b.arrayOffset() + entriesOffset + keyRelOffset + HFileBlockIndex.SECONDARY_INDEX_ENTRY_OVERHEAD; byte[] blockKey = Arrays.copyOfRange(b.array(), keyOffset, keyOffset + keyLen); String blockKeyStr = Bytes.toString(blockKey); blockKeys.add(blockKey); // If the first key of the block is not among the keys written, we // are not parsing the non-root index block format correctly. 
assertTrue("Invalid block key from leaf-level block: " + blockKeyStr, keyStrSet.contains(blockKeyStr)); } } // Validate the mid-key. assertEquals(Bytes.toStringBinary(blockKeys.get((blockKeys.size() - 1) / 2)), reader.midKey()); assertEquals(UNCOMPRESSED_INDEX_SIZES[testI], reader.getTrailer().getUncompressedDataIndexSize()); reader.close(); reader2.close(); } } private void checkSeekTo(byte[][] keys, HFileScanner scanner, int i) throws IOException { assertEquals("Failed to seek to key #" + i + " (" + Bytes.toStringBinary(keys[i]) + ")", 0, scanner.seekTo(KeyValueUtil.createKeyValueFromKey(keys[i]))); } private void assertArrayEqualsBuffer(String msgPrefix, byte[] arr, ByteBuffer buf) { assertEquals( msgPrefix + ": expected " + Bytes.toStringBinary(arr) + ", actual " + Bytes.toStringBinary(buf), 0, Bytes.compareTo(arr, 0, arr.length, buf.array(), buf.arrayOffset(), buf.limit())); } /** Check a key/value pair after it was read by the reader */ private void checkKeyValue(String msgPrefix, byte[] expectedKey, byte[] expectedValue, ByteBuffer keyRead, ByteBuffer valueRead) { if (!msgPrefix.isEmpty()) msgPrefix += ". 
"; assertArrayEqualsBuffer(msgPrefix + "Invalid key", expectedKey, keyRead); assertArrayEqualsBuffer(msgPrefix + "Invalid value", expectedValue, valueRead); } @Test public void testIntermediateLevelIndicesWithLargeKeys() throws IOException { testIntermediateLevelIndicesWithLargeKeys(16); } @Test public void testIntermediateLevelIndicesWithLargeKeysWithMinNumEntries() throws IOException { // because of the large rowKeys, we will end up with a 50-level block index without sanity check testIntermediateLevelIndicesWithLargeKeys(2); } public void testIntermediateLevelIndicesWithLargeKeys(int minNumEntries) throws IOException { Path hfPath = new Path(TEST_UTIL.getDataTestDir(), "testIntermediateLevelIndicesWithLargeKeys.hfile"); int maxChunkSize = 1024; FileSystem fs = FileSystem.get(conf); CacheConfig cacheConf = new CacheConfig(conf); conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, maxChunkSize); conf.setInt(HFileBlockIndex.MIN_INDEX_NUM_ENTRIES_KEY, minNumEntries); HFileContext context = new HFileContextBuilder().withBlockSize(16).build(); HFile.Writer hfw = new HFile.WriterFactory(conf, cacheConf).withFileContext(context) .withPath(fs, hfPath).create(); List<byte[]> keys = new ArrayList<>(); // This should result in leaf-level indices and a root level index for (int i = 0; i < 100; i++) { byte[] rowkey = new byte[maxChunkSize + 1]; byte[] b = Bytes.toBytes(i); System.arraycopy(b, 0, rowkey, rowkey.length - b.length, b.length); keys.add(rowkey); hfw.append(ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY).setRow(rowkey) .setFamily(HConstants.EMPTY_BYTE_ARRAY).setQualifier(HConstants.EMPTY_BYTE_ARRAY) .setTimestamp(HConstants.LATEST_TIMESTAMP).setType(KeyValue.Type.Maximum.getCode()) .setValue(HConstants.EMPTY_BYTE_ARRAY).build()); } hfw.close(); HFile.Reader reader = HFile.createReader(fs, hfPath, cacheConf, true, conf); // Scanner doesn't do Cells yet. Fix. 
HFileScanner scanner = reader.getScanner(conf, true, true); for (int i = 0; i < keys.size(); ++i) { scanner.seekTo(ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY) .setRow(keys.get(i)).setFamily(HConstants.EMPTY_BYTE_ARRAY) .setQualifier(HConstants.EMPTY_BYTE_ARRAY).setTimestamp(HConstants.LATEST_TIMESTAMP) .setType(KeyValue.Type.Maximum.getCode()).setValue(HConstants.EMPTY_BYTE_ARRAY).build()); } reader.close(); } /** * This test is for HBASE-27940, which midkey metadata in root index block would always be ignored * by {@link BlockIndexReader#readMultiLevelIndexRoot}. */ @Test public void testMidKeyReadSuccessfullyFromRootIndexBlock() throws IOException { conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, 128); Path hfilePath = new Path(TEST_UTIL.getDataTestDir(), "testMidKeyReadSuccessfullyFromRootIndexBlock"); Compression.Algorithm compressAlgo = Compression.Algorithm.NONE; int entryCount = 50000; HFileContext context = new HFileContextBuilder().withBlockSize(4096).withIncludesTags(false) .withDataBlockEncoding(DataBlockEncoding.NONE).withCompression(compressAlgo).build(); try (HFile.Writer writer = new HFile.WriterFactory(conf, new CacheConfig(conf)) .withPath(fs, hfilePath).withFileContext(context).create()) { List<KeyValue> keyValues = new ArrayList<>(entryCount); for (int i = 0; i < entryCount; ++i) { byte[] keyBytes = RandomKeyValueUtil.randomOrderedKey(RNG, i); // A random-length random value. 
byte[] valueBytes = RandomKeyValueUtil.randomValue(RNG); KeyValue keyValue = new KeyValue(keyBytes, null, null, HConstants.LATEST_TIMESTAMP, valueBytes); writer.append(keyValue); keyValues.add(keyValue); } } try (FSDataInputStream fsdis = fs.open(hfilePath)) { long fileSize = fs.getFileStatus(hfilePath).getLen(); FixedFileTrailer trailer = FixedFileTrailer.readFromStream(fsdis, fileSize); assertEquals(3, trailer.getMajorVersion()); assertEquals(entryCount, trailer.getEntryCount()); HFileContext meta = new HFileContextBuilder().withCompression(compressAlgo) .withIncludesMvcc(false).withIncludesTags(false) .withDataBlockEncoding(DataBlockEncoding.NONE).withHBaseCheckSum(true).build(); ReaderContext readerContext = new ReaderContextBuilder().withInputStreamWrapper(new FSDataInputStreamWrapper(fsdis)) .withFilePath(hfilePath).withFileSystem(fs).withFileSize(fileSize).build(); HFileBlock.FSReader blockReader = new HFileBlock.FSReaderImpl(readerContext, meta, ByteBuffAllocator.HEAP, conf); MyEncoder encoder = new MyEncoder(); HFileBlockIndex.CellBasedKeyBlockIndexReaderV2 dataBlockIndexReader = new HFileBlockIndex.CellBasedKeyBlockIndexReaderV2(trailer.createComparator(), trailer.getNumDataIndexLevels(), encoder); HFileBlock.BlockIterator blockIter = blockReader.blockRange(trailer.getLoadOnOpenDataOffset(), fileSize - trailer.getTrailerSize()); // Data index. We also read statistics about the block index written after // the root level. dataBlockIndexReader.readMultiLevelIndexRoot( blockIter.nextBlockWithBlockType(BlockType.ROOT_INDEX), trailer.getDataIndexCount()); NoOpEncodedSeeker noOpEncodedSeeker = (NoOpEncodedSeeker) encoder.encoderSeeker; // Assert we have read midkey metadata successfully. 
assertTrue(noOpEncodedSeeker.midLeafBlockOffset >= 0); assertTrue(noOpEncodedSeeker.midLeafBlockOnDiskSize > 0); assertTrue(noOpEncodedSeeker.midKeyEntry >= 0); } } static class MyEncoder implements HFileIndexBlockEncoder { EncodedSeeker encoderSeeker; @Override public void saveMetadata(Writer writer) throws IOException { NoOpIndexBlockEncoder.INSTANCE.saveMetadata(writer); } @Override public void encode(BlockIndexChunk blockIndexChunk, boolean rootIndexBlock, DataOutput out) throws IOException { NoOpIndexBlockEncoder.INSTANCE.encode(blockIndexChunk, rootIndexBlock, out); } @Override public IndexBlockEncoding getIndexBlockEncoding() { return NoOpIndexBlockEncoder.INSTANCE.getIndexBlockEncoding(); } @Override public EncodedSeeker createSeeker() { encoderSeeker = NoOpIndexBlockEncoder.INSTANCE.createSeeker(); return encoderSeeker; } } }
apache/derby
34,744
java/org.apache.derby.engine/org/apache/derby/impl/sql/compile/DeleteNode.java
/* Derby - Class org.apache.derby.impl.sql.compile.DeleteNode Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.derby.impl.sql.compile; import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Properties; import org.apache.derby.catalog.UUID; import org.apache.derby.shared.common.error.StandardException; import org.apache.derby.shared.common.reference.ClassName; import org.apache.derby.shared.common.reference.SQLState; import org.apache.derby.iapi.services.classfile.VMOpcode; import org.apache.derby.iapi.services.compiler.LocalField; import org.apache.derby.iapi.services.compiler.MethodBuilder; import org.apache.derby.iapi.services.context.ContextManager; import org.apache.derby.iapi.services.io.FormatableBitSet; import org.apache.derby.iapi.services.io.FormatableProperties; import org.apache.derby.shared.common.sanity.SanityManager; import org.apache.derby.iapi.sql.ResultDescription; import org.apache.derby.iapi.sql.StatementType; import org.apache.derby.iapi.sql.compile.CompilerContext; import org.apache.derby.iapi.sql.compile.IgnoreFilter; import org.apache.derby.iapi.sql.compile.ScopeFilter; import org.apache.derby.iapi.sql.conn.Authorizer; import 
org.apache.derby.iapi.sql.dictionary.ColumnDescriptor; import org.apache.derby.iapi.sql.dictionary.ColumnDescriptorList; import org.apache.derby.iapi.sql.dictionary.ConglomerateDescriptor; import org.apache.derby.iapi.sql.dictionary.DataDictionary; import org.apache.derby.iapi.sql.dictionary.TriggerDescriptorList; import org.apache.derby.iapi.sql.dictionary.TableDescriptor; import org.apache.derby.iapi.sql.dictionary.TriggerDescriptor; import org.apache.derby.iapi.sql.execute.ConstantAction; import org.apache.derby.iapi.store.access.StaticCompiledOpenConglomInfo; import org.apache.derby.iapi.store.access.TransactionController; import org.apache.derby.vti.DeferModification; /** * A DeleteNode represents a DELETE statement. It is the top-level node * for the statement. * * For positioned delete, there may be no from table specified. * The from table will be derived from the cursor specification of * the named cursor. * */ class DeleteNode extends DMLModStatementNode { /* Column name for the RowLocation column in the ResultSet */ private static final String COLUMNNAME = "###RowLocationToDelete"; /* Filled in by bind. */ private boolean deferred; private FromTable targetTable; private FormatableBitSet readColsBitSet; private ConstantAction[] dependentConstantActions; private boolean cascadeDelete; private StatementNode[] dependentNodes; /** * Constructor for a DeleteNode. * * @param targetTableName The name of the table to delete from * @param queryExpression The query expression that will generate * the rows to delete from the given table * @param matchingClause Non-null if this DML is part of a MATCHED clause of a MERGE statement. * @param cm The context manager */ DeleteNode ( TableName targetTableName, ResultSetNode queryExpression, MatchingClauseNode matchingClause, ContextManager cm ) { super( queryExpression, matchingClause, cm ); this.targetTableName = targetTableName; } @Override String statementToString() { return "DELETE"; } /** * Bind this DeleteNode. 
 * This means looking up tables and columns and
 * getting their types, and figuring out the result types of all
 * expressions, as well as doing view resolution, permissions checking,
 * etc.
 * <p>
 * If any indexes need to be updated, we add all the columns in the
 * base table to the result column list, so that we can use the column
 * values as look-up keys for the index rows to be deleted. Binding a
 * delete will also massage the tree so that the ResultSetNode has
 * column containing the RowLocation of the base row.
 *
 * @exception StandardException Thrown on error
 */
@Override
public void bindStatement() throws StandardException
{
    // We just need select privilege on the where clause tables
    getCompilerContext().pushCurrentPrivType( Authorizer.SELECT_PRIV);

    try
    {
        FromList fromList = new FromList(
                getOptimizerFactory().doJoinOrderOptimization(),
                getContextManager());
        ResultColumn rowLocationColumn = null;
        CurrentRowLocationNode rowLocationNode;
        TableName cursorTargetTableName = null;
        CurrentOfNode currentOfNode = null;

        //
        // Don't add privilege requirements for the UDT types of columns.
        // The compiler will attempt to add these when generating the full column list during
        // binding of the tables.
        //
        IgnoreFilter ignorePermissions = new IgnoreFilter();
        getCompilerContext().addPrivilegeFilter( ignorePermissions );

        DataDictionary dataDictionary = getDataDictionary();

        // for DELETE clause of a MERGE statement, the tables have already been bound
        if ( !inMatchingClause() )
        {
            super.bindTables(dataDictionary);
        }

        // wait to bind named target table until the underlying
        // cursor is bound, so that we can get it from the
        // cursor if this is a positioned delete.

        // for positioned delete, get the cursor's target table.
        if (SanityManager.DEBUG)
            SanityManager.ASSERT(resultSet != null && resultSet instanceof SelectNode,
                "Delete must have a select result set");

        SelectNode sel = (SelectNode)resultSet;
        targetTable = (FromTable) sel.fromList.elementAt(0);

        if (targetTable instanceof CurrentOfNode)
        {
            currentOfNode = (CurrentOfNode) targetTable;
            cursorTargetTableName = inMatchingClause() ?
                targetTableName : currentOfNode.getBaseCursorTargetTableName();

            // instead of an assert, we might say the cursor is not updatable.
            if (SanityManager.DEBUG)
                SanityManager.ASSERT(cursorTargetTableName != null);
        }

        if (targetTable instanceof FromVTI)
        {
            targetVTI = (FromVTI) targetTable;
            targetVTI.setTarget();
        }
        else
        {
            // positioned delete can leave off the target table.
            // we get it from the cursor supplying the position.
            if (targetTableName == null)
            {
                // verify we have current of
                if (SanityManager.DEBUG)
                    SanityManager.ASSERT(cursorTargetTableName!=null);
                targetTableName = cursorTargetTableName;
            }
            // for positioned delete, we need to verify that
            // the named table is the same as the cursor's target (base table name).
            else if (cursorTargetTableName != null)
            {
                // this match requires that the named table in the delete
                // be the same as a base name in the cursor.
                if ( !targetTableName.equals(cursorTargetTableName))
                {
                    throw StandardException.newException(SQLState.LANG_CURSOR_DELETE_MISMATCH,
                        targetTableName, currentOfNode.getCursorName());
                }
            }
        }

        // descriptor must exist, tables already bound.
        verifyTargetTable();

        /* Generate a select list for the ResultSetNode - CurrentRowLocation(). */
        if ( SanityManager.DEBUG )
        {
            SanityManager.ASSERT((resultSet.getResultColumns() == null),
                "resultColumns is expected to be null until bind time");
        }

        if (targetTable instanceof FromVTI)
        {
            getResultColumnList();
            resultColumnList = targetTable.getResultColumnsForList(null,
                    resultColumnList, null);

            /* Set the new result column list in the result set */
            resultSet.setResultColumns(resultColumnList);
        }
        else
        {
            /*
            ** Start off assuming no columns from the base table
            ** are needed in the rcl.
            */
            resultColumnList = new ResultColumnList(getContextManager());
            FromBaseTable fbt = getResultColumnList(resultColumnList);
            readColsBitSet = getReadMap(dataDictionary, targetTableDescriptor);
            resultColumnList = fbt.addColsToList(resultColumnList, readColsBitSet);

            /*
            ** If all bits are set, then behave as if we chose all
            ** in the first place
            */
            int i = 1;
            int size = targetTableDescriptor.getMaxColumnID();
            for (; i <= size; i++)
            {
                if (!readColsBitSet.get(i))
                {
                    break;
                }
            }
            if (i > size)
            {
                // a null readColsBitSet is the convention for "read every column"
                readColsBitSet = null;
            }

            /* Generate the RowLocation column */
            rowLocationNode = new CurrentRowLocationNode(getContextManager());
            rowLocationColumn =
                new ResultColumn(COLUMNNAME, rowLocationNode, getContextManager());
            rowLocationColumn.markGenerated();

            /* Append to the ResultColumnList */
            resultColumnList.addResultColumn(rowLocationColumn);

            /* Force the added columns to take on the table's correlation name, if any */
            correlateAddedColumns( resultColumnList, targetTable );

            /* Add the new result columns to the driving result set */
            ResultColumnList originalRCL = resultSet.getResultColumns();
            if ( originalRCL != null )
            {
                originalRCL.appendResultColumns( resultColumnList, false );
                resultColumnList = originalRCL;
            }
            resultSet.setResultColumns(resultColumnList);
        }

        // done excluding column types from privilege checking
        getCompilerContext().removePrivilegeFilter( ignorePermissions );

        /* Bind the expressions before the ResultColumns are bound */

        // only add privileges when we're inside the WHERE clause
        ScopeFilter scopeFilter = new ScopeFilter(
            getCompilerContext(), CompilerContext.WHERE_SCOPE, 1 );
        getCompilerContext().addPrivilegeFilter( scopeFilter );
        super.bindExpressions();
        getCompilerContext().removePrivilegeFilter( scopeFilter );

        /* Bind untyped nulls directly under the result columns */
        resultSet.getResultColumns().
            bindUntypedNullsToResultColumns(resultColumnList);

        if (! (targetTable instanceof FromVTI))
        {
            /* Bind the new ResultColumn */
            rowLocationColumn.bindResultColumnToExpression();

            bindConstraints(dataDictionary,
                            getOptimizerFactory(),
                            targetTableDescriptor,
                            null,
                            resultColumnList,
                            (int[]) null,
                            readColsBitSet,
                            true,            // we alway include triggers in core language
                            new boolean[1]); // dummy

            /* If the target table is also a source table, then
             * the delete will have to be in deferred mode
             * For deletes, this means that the target table appears in a
             * subquery. Also, self-referencing foreign key deletes
             * are deferred. And triggers cause the delete to be deferred.
             */
            if (resultSet.subqueryReferencesTarget(
                    targetTableDescriptor.getName(), true) ||
                requiresDeferredProcessing())
            {
                deferred = true;
            }
        }
        else
        {
            // updatable VTI target: the VTI's deferral policy decides
            deferred = VTIDeferModPolicy.deferIt( DeferModification.DELETE_STATEMENT,
                    targetVTI,
                    null,
                    sel.getWhereClause());
        }

        /* Verify that all underlying ResultSets reclaimed their FromList */
        if (SanityManager.DEBUG)
        {
            SanityManager.ASSERT(fromList.size() == 0,
                "fromList.size() is expected to be 0, not " + fromList.size() +
                " on return from RS.bindExpressions()");
        }

        //In case of cascade delete , create nodes for
        //the ref action dependent tables and bind them.
        if(fkTableNames != null)
        {
            String currentTargetTableName = targetTableDescriptor.getSchemaName() +
                "." + targetTableDescriptor.getName();

            if(!isDependentTable){
                //graph node
                dependentTables = new HashSet<String>();
            }

            /*Check whether the current target has already been explored.
             *If we are seeing the same table name which we binded earlier
             *means we have cyclic references.
             */
            if (dependentTables.add(currentTargetTableName))
            {
                cascadeDelete = true;
                int noDependents = fkTableNames.length;
                dependentNodes = new StatementNode[noDependents];
                for(int i =0 ; i < noDependents ; i ++)
                {
                    dependentNodes[i] = getDependentTableNode( fkSchemaNames[i],
                            fkTableNames[i],
                            fkRefActions[i],
                            fkColDescriptors[i]);
                    dependentNodes[i].bindStatement();
                }
            }
        }
        else
        {
            //case where current dependent table does not have dependent tables
            if(isDependentTable)
            {
                String currentTargetTableName = targetTableDescriptor.getSchemaName() +
                    "." + targetTableDescriptor.getName();
                dependentTables.add(currentTargetTableName);
            }
        }

        // add need for DELETE privilege on the target table
        getCompilerContext().pushCurrentPrivType( getPrivType());
        getCompilerContext().addRequiredTablePriv( targetTableDescriptor);
        getCompilerContext().popCurrentPrivType();
    }
    finally
    {
        // pops the SELECT_PRIV pushed at the top of this method
        getCompilerContext().popCurrentPrivType();
    }
} // end of bind

/**
 * The privilege type this statement requires on its target table.
 */
@Override
int getPrivType()
{
    return Authorizer.DELETE_PRIV;
}

/**
 * Return true if the node references SESSION schema tables (temporary or permanent)
 *
 * @return true if references SESSION schema tables, else false
 *
 * @exception StandardException Thrown on error
 */
@Override
public boolean referencesSessionSchema() throws StandardException
{
    //If delete table is on a SESSION schema table, then return true.
    return resultSet.referencesSessionSchema();
}

/**
 * Compile constants that Execution will use
 *
 * @exception StandardException Thrown on failure
 */
@Override
public ConstantAction makeConstantAction() throws StandardException
{
    /* Different constant actions for base tables and updatable VTIs */
    if (targetTableDescriptor != null)
    {
        // Base table
        int lckMode = resultSet.updateTargetLockMode();
        long heapConglomId = targetTableDescriptor.getHeapConglomerateId();
        TransactionController tc =
            getLanguageConnectionContext().getTransactionCompile();
        StaticCompiledOpenConglomInfo[] indexSCOCIs =
            new StaticCompiledOpenConglomInfo[indexConglomerateNumbers.length];

        for (int index = 0; index < indexSCOCIs.length; index++)
        {
            indexSCOCIs[index] =
                tc.getStaticCompiledConglomInfo(indexConglomerateNumbers[index]);
        }

        /*
        ** Do table locking if the table's lock granularity is
        ** set to table.
        */
        if (targetTableDescriptor.getLockGranularity() ==
                TableDescriptor.TABLE_LOCK_GRANULARITY)
        {
            lckMode = TransactionController.MODE_TABLE;
        }

        ResultDescription resultDescription = null;
        if(isDependentTable)
        {
            //triggers need the result description ,
            //dependent tables don't have a source from generation time
            //to get the result description
            resultDescription = makeResultDescription();
        }

        // NOTE(review): the run of null/0 arguments corresponds to parameters of
        // getDeleteConstantAction that are unused for a plain DELETE — confirm
        // against the factory's signature before reordering anything here.
        return getGenericConstantActionFactory().getDeleteConstantAction
            ( heapConglomId,
              targetTableDescriptor.getTableType(),
              tc.getStaticCompiledConglomInfo(heapConglomId),
              indicesToMaintain,
              indexConglomerateNumbers,
              indexSCOCIs,
              deferred,
              false,
              targetTableDescriptor.getUUID(),
              lckMode,
              null, null, null, 0, null, null,
              resultDescription,
              getFKInfo(),
              getTriggerInfo(),
              (readColsBitSet == null) ?
                  (FormatableBitSet)null : new FormatableBitSet(readColsBitSet),
              getReadColMap(targetTableDescriptor.getNumberOfColumns(),readColsBitSet),
              resultColumnList.getStreamStorableColIds(targetTableDescriptor.getNumberOfColumns()),
              (readColsBitSet == null) ?
                  targetTableDescriptor.getNumberOfColumns() :
                  readColsBitSet.getNumBitsSet(),
              (UUID) null,
              resultSet.isOneRowResultSet(),
              dependentConstantActions,
              inMatchingClause());
    }
    else
    {
        /* Return constant action for VTI
         * NOTE: ConstantAction responsible for preserving instantiated
         * VTIs for in-memory queries and for only preserving VTIs
         * that implement Serializable for SPSs.
         */
        return getGenericConstantActionFactory().getUpdatableVTIConstantAction(
                DeferModification.DELETE_STATEMENT, deferred);
    }
}

/**
 * Code generation for delete.
 * The generated code will contain:
 * o  A static member for the (xxx)ResultSet with the RowLocations
 * o  The static member will be assigned the appropriate ResultSet within
 *    the nested calls to get the ResultSets.  (The appropriate cast to the
 *    (xxx)ResultSet will be generated.)
 * o  The CurrentRowLocation() in SelectNode's select list will generate
 *    a new method for returning the RowLocation as well as a call to
 *    that method which will be stuffed in the call to the
 *    ProjectRestrictResultSet.
 * o  In case of referential actions, this function generate an
 *    array of resultsets on its dependent tables.
 *
 * @param acb The ActivationClassBuilder for the class being built
 * @param mb  The execute() method to be built
 *
 * @exception StandardException Thrown on error
 */
@Override
void generate(ActivationClassBuilder acb, MethodBuilder mb)
        throws StandardException
{
    // If the DML is on the temporary table, generate the code to
    // mark temporary table as modified in the current UOW. After
    // DERBY-827 this must be done in execute() since
    // createResultSet() will only be called once.
    generateCodeForTemporaryTable(acb);

    /* generate the parameters */
    if(!isDependentTable)
        generateParameterValueSet(acb);

    acb.pushGetResultSetFactoryExpression(mb);
    acb.newRowLocationScanResultSetName();

    // arg 1
    if ( inMatchingClause() )
    {
        matchingClause.generateResultSetField( acb, mb );
    }
    else
    {
        resultSet.generate( acb, mb );
    }

    String resultSetGetter;
    int argCount;
    String parentResultSetId;

    // Base table
    if (targetTableDescriptor != null)
    {
        /* Create the declaration for the scan ResultSet which generates the
         * RowLocations to be deleted.
         * Note that the field cannot be static because there
         * can be multiple activations of the same activation class,
         * and they can't share this field.  Only exprN fields can
         * be shared (or, more generally, read-only fields).
         * RESOLVE - Need to deal with the type of the field.
         */
        acb.newFieldDeclaration(Modifier.PRIVATE,
                ClassName.CursorResultSet,
                acb.getRowLocationScanResultSetName());

        if(cascadeDelete || isDependentTable)
        {
            resultSetGetter = "getDeleteCascadeResultSet";
            argCount = 4;
        }
        else
        {
            resultSetGetter = "getDeleteResultSet";
            argCount = 1;
        }
    }
    else
    {
        argCount = 1;
        resultSetGetter = "getDeleteVTIResultSet";
    }

    if(isDependentTable)
    {
        mb.push(acb.addItem(makeConstantAction()));
    }else
    {
        if(cascadeDelete)
        {
            mb.push(-1); //root table.
        }
    }

    String resultSetArrayType = ClassName.ResultSet + "[]";

    if(cascadeDelete)
    {
        parentResultSetId = targetTableDescriptor.getSchemaName() +
            "." + targetTableDescriptor.getName();

        // Generate the code to build the array
        LocalField arrayField =
            acb.newFieldDeclaration(Modifier.PRIVATE, resultSetArrayType);
        mb.pushNewArray(ClassName.ResultSet, dependentNodes.length); // new ResultSet[size]
        mb.setField(arrayField);

        for(int index=0 ; index < dependentNodes.length ; index++)
        {
            dependentNodes[index].setRefActionInfo(fkIndexConglomNumbers[index],
                    fkColArrays[index], parentResultSetId, true);
            mb.getField(arrayField); // first arg (resultset array reference)

            /*beetle:5360 : if too many statements are added to a method,
             *size of method can hit 65k limit, which will
             *lead to the class format errors at load time.
             *To avoid this problem, when number of statements added
             *to a method is > 2048, remaining statements are added to a new function
             *and called from the function which created the function.
             *See Beetle 5135 or 4293 for further details on this type of problem.
             */
            if(mb.statementNumHitLimit(10))
            {
                MethodBuilder dmb =
                    acb.newGeneratedFun(ClassName.ResultSet, Modifier.PRIVATE);
                dependentNodes[index].generate(acb,dmb); //generates the resultset expression
                dmb.methodReturn();
                dmb.complete();
                /* Generate the call to the new method */
                mb.pushThis(); //second arg will be generated by this call
                mb.callMethod(VMOpcode.INVOKEVIRTUAL, (String) null,
                        dmb.getName(), ClassName.ResultSet, 0);
            }else
            {
                dependentNodes[index].generate(acb,mb); //generates the resultset expression
            }

            mb.setArrayElement(index);
        }

        mb.getField(arrayField); // fourth argument - array reference
    }
    else
    {
        if(isDependentTable)
        {
            mb.pushNull(resultSetArrayType); //No dependent tables for this table
        }
    }

    if(cascadeDelete || isDependentTable)
    {
        parentResultSetId = targetTableDescriptor.getSchemaName() +
            "." + targetTableDescriptor.getName();
        mb.push(parentResultSetId);
    }

    mb.callMethod(VMOpcode.INVOKEINTERFACE, (String) null,
            resultSetGetter, ClassName.ResultSet, argCount);

    if(!isDependentTable && cascadeDelete)
    {
        int numResultSets = acb.getRowCount();
        if(numResultSets > 0)
        {
            //generate activation.raParentResultSets = new NoPutResultSet[size]
            MethodBuilder constructor = acb.getConstructor();
            constructor.pushThis();
            constructor.pushNewArray(ClassName.CursorResultSet, numResultSets);
            constructor.putField(ClassName.BaseActivation,
                    "raParentResultSets",
                    ClassName.CursorResultSet + "[]");
            constructor.endStatement();
        }
    }
}

/**
 * Return the type of statement, something from
 * StatementType.
 *
 * @return the type of statement
 */
@Override
protected final int getStatementType()
{
    return StatementType.DELETE;
}

/**
 * Gets the map of all columns which must be read out of the base table.
 * These are the columns needed to:
 *
 *   o  maintain indices
 *   o  maintain foreign keys
 *
 * The returned map is a FormatableBitSet with 1 bit for each column in the
 * table plus an extra, unused 0-bit. If a 1-based column id must
 * be read from the base table, then the corresponding 1-based bit
 * is turned ON in the returned FormatableBitSet.
 *
 * @param dd the data dictionary to look in
 * @param baseTable the base table descriptor
 *
 * @return a FormatableBitSet of columns to be read out of the base table
 *
 * @exception StandardException Thrown on error
 */
public FormatableBitSet getReadMap
(
    DataDictionary dd,
    TableDescriptor baseTable
)
    throws StandardException
{
    boolean[] needsDeferredProcessing = new boolean[1];
    needsDeferredProcessing[0] = requiresDeferredProcessing();

    ArrayList<ConglomerateDescriptor> conglomerates =
        new ArrayList<ConglomerateDescriptor>();

    relevantTriggers = new TriggerDescriptorList();

    FormatableBitSet columnMap = DeleteNode.getDeleteReadMap(baseTable,
            conglomerates, relevantTriggers, needsDeferredProcessing);

    markAffectedIndexes(conglomerates);

    adjustDeferredFlag( needsDeferredProcessing[0] );

    return columnMap;
}

/**
 * In case of referential actions, we require to perform
 * DML (UPDATE or DELETE) on the dependent tables.
 * Following function returns the DML Node for the dependent table.
 */
private StatementNode getDependentTableNode(String schemaName,
                                            String tableName,
                                            int refAction,
                                            ColumnDescriptorList cdl)
        throws StandardException
{
    DMLModStatementNode node = null;

    if(refAction == StatementType.RA_CASCADE)
    {
        // ON DELETE CASCADE: delete the matching rows in the dependent table
        node = getEmptyDeleteNode(schemaName , tableName);
    }

    if(refAction == StatementType.RA_SETNULL)
    {
        // ON DELETE SET NULL: null out the FK columns in the dependent table
        node = getEmptyUpdateNode(schemaName , tableName, cdl);
    }

    // The dependent node should be marked as such, and it should inherit
    // the set of dependent tables from the parent so that it can break
    // out of cycles in the dependency graph.
    if (node != null) {
        node.isDependentTable = true;
        node.dependentTables = dependentTables;
    }

    return node;
}

/**
 * Builds a DELETE statement node, with no WHERE clause of its own, over the
 * named dependent table. Used for ON DELETE CASCADE referential actions.
 */
private DeleteNode getEmptyDeleteNode(String schemaName,
                                      String targetTableName)
        throws StandardException
{
    ValueNode whereClause = null;

    TableName tableName =
        new TableName(schemaName , targetTableName, getContextManager());

    FromList fromList = new FromList(getContextManager());
    FromTable fromTable = new FromBaseTable(
            tableName, null, FromBaseTable.DELETE, null, getContextManager());

    //we would like to use references index & table scan instead of
    //what optimizer says for the dependent table scan.
    Properties targetProperties = new FormatableProperties();
    targetProperties.put("index", "null");
    ((FromBaseTable) fromTable).setTableProperties(targetProperties);

    fromList.addFromTable(fromTable);

    SelectNode rs = new SelectNode(null,
                                   fromList, /* FROM list */
                                   whereClause, /* WHERE clause */
                                   null, /* GROUP BY list */
                                   null, /* having clause */
                                   null, /* windows */
                                   null, /* optimizer override plan */
                                   getContextManager());

    return new DeleteNode(tableName, rs, null, getContextManager());
}

/**
 * Builds an UPDATE statement node over the named dependent table that sets
 * each nullable column in {@code cdl} to NULL (see getSetClause). Used for
 * ON DELETE SET NULL referential actions.
 */
private UpdateNode getEmptyUpdateNode(String schemaName,
                                      String targetTableName,
                                      ColumnDescriptorList cdl)
        throws StandardException
{
    ValueNode whereClause = null;

    TableName tableName =
        new TableName(schemaName , targetTableName, getContextManager());

    FromList fromList = new FromList(getContextManager());
    FromTable fromTable = new FromBaseTable(
            tableName, null, FromBaseTable.DELETE, null, getContextManager());

    //we would like to use references index & table scan instead of
    //what optimizer says for the dependent table scan.
    Properties targetProperties = new FormatableProperties();
    targetProperties.put("index", "null");
    ((FromBaseTable) fromTable).setTableProperties(targetProperties);

    fromList.addFromTable(fromTable);

    SelectNode sn = new SelectNode(getSetClause(cdl),
                                   fromList, /* FROM list */
                                   whereClause, /* WHERE clause */
                                   null, /* GROUP BY list */
                                   null, /* having clause */
                                   null, /* windows */
                                   null, /* optimizer override plan */
                                   getContextManager());

    return new UpdateNode(tableName, sn, null, getContextManager());
}

/**
 * Builds the SET clause for an ON DELETE SET NULL dependent update: one
 * "column = NULL" result column for each nullable column in {@code cdl}.
 */
private ResultColumnList getSetClause(ColumnDescriptorList cdl)
        throws StandardException
{
    ResultColumn resultColumn;
    ValueNode    valueNode;

    ResultColumnList columnList = new ResultColumnList(getContextManager());

    valueNode = new UntypedNullConstantNode(getContextManager());
    for(int index =0 ; index < cdl.size() ; index++)
    {
        ColumnDescriptor cd = cdl.elementAt(index);
        //only columns that are nullable need to be set to 'null' for ON
        //DELETE SET NULL
        if((cd.getType()).isNullable())
        {
            resultColumn = new ResultColumn(cd, valueNode, getContextManager());
            columnList.addResultColumn(resultColumn);
        }
    }
    return columnList;
}

@Override
public void optimizeStatement() throws StandardException
{
    // Don't add any more permissions during pre-processing
    IgnoreFilter ignorePermissions = new IgnoreFilter();
    getCompilerContext().addPrivilegeFilter( ignorePermissions );

    // optimize the cascade-delete nodes before the root statement
    if(cascadeDelete)
    {
        for(int index=0 ; index < dependentNodes.length ; index++)
        {
            dependentNodes[index].optimizeStatement();
        }
    }

    super.optimizeStatement();

    // allow more permissions to be added in case we're just one action
    // of a MERGE statement
    getCompilerContext().removePrivilegeFilter( ignorePermissions );
}

/**
 * Builds a bitmap of all columns which should be read from the
 * Store in order to satisfy a DELETE statement.
 *
 * 1) finds all indices on this table
 * 2) adds the index columns to a bitmap of affected columns
 * 3) adds the index descriptors to a list of conglomerate
 *    descriptors.
 * 4) finds all DELETE triggers on the table
 * 5) if there are any DELETE triggers, then do one of the following
 *    a) If all of the triggers have MISSING referencing clause, then that
 *       means that the trigger actions do not have access to before and
 *       after values. In that case, there is no need to blanketly decide
 *       to include all the columns in the read map just because there are
 *       triggers defined on the table.
 *    b) Since one/more triggers have REFERENCING clause on them, get all
 *       the columns because we don't know what the user will ultimately
 *       reference.
 * 6) adds the triggers to an evolving list of triggers
 *
 * @param conglomerates OUT: list of affected indices
 * @param relevantTriggers IN/OUT. Passed in as an empty list. Filled in as we go.
 * @param needsDeferredProcessing IN/OUT. true if the statement already needs
 *                                deferred processing. set while evaluating this
 *                                routine if a trigger requires
 *                                deferred processing
 *
 * @return a FormatableBitSet of columns to be read out of the base table
 *
 * @exception StandardException Thrown on error
 */
private static FormatableBitSet getDeleteReadMap
(
    TableDescriptor              baseTable,
    List<ConglomerateDescriptor> conglomerates,
    TriggerDescriptorList        relevantTriggers,
    boolean[]                    needsDeferredProcessing
)
    throws StandardException
{
    int columnCount = baseTable.getMaxColumnID();
    FormatableBitSet columnMap = new FormatableBitSet(columnCount + 1);

    /*
    ** Get a list of the indexes that need to be
    ** updated.  ColumnMap contains all indexed
    ** columns where 1 or more columns in the index
    ** are going to be modified.
    **
    ** Notice that we don't need to add constraint
    ** columns.  This is because we add all key constraints
    ** (e.g. foreign keys) as a side effect of adding their
    ** indexes above.  And we don't need to deal with
    ** check constraints on a delete.
    **
    ** Adding indexes also takes care of the replication
    ** requirement of having the primary key.
    */
    DMLModStatementNode.getXAffectedIndexes(
        baseTable, null, columnMap, conglomerates);

    /*
    ** If we have any DELETE triggers, then do one of the following
    ** 1)If all of the triggers have MISSING referencing clause, then that
    ** means that the trigger actions do not have access to before and
    ** after values. In that case, there is no need to blanketly decide to
    ** include all the columns in the read map just because there are
    ** triggers defined on the table.
    ** 2)Since one/more triggers have REFERENCING clause on them, get all
    ** the columns because we don't know what the user will ultimately reference.
    */
    baseTable.getAllRelevantTriggers(
        StatementType.DELETE, (int[])null, relevantTriggers );

    if (relevantTriggers.size() > 0)
    {
        needsDeferredProcessing[0] = true;

        boolean needToIncludeAllColumns = false;
        for (TriggerDescriptor trd : relevantTriggers)
        {
            //Does this trigger have REFERENCING clause defined on it.
            //If yes, then read all the columns from the trigger table.
            if (!trd.getReferencingNew() && !trd.getReferencingOld())
                continue;
            else
            {
                needToIncludeAllColumns = true;
                break;
            }
        }

        if (needToIncludeAllColumns) {
            for (int i = 1; i <= columnCount; i++)
            {
                columnMap.set(i);
            }
        }
    }

    return columnMap;
}

/**
 * Force column references (particularly those added by the compiler)
 * to use the correlation name on the base table, if any.
 */
private void correlateAddedColumns( ResultColumnList rcl, FromTable fromTable )
    throws StandardException
{
    String correlationName = fromTable.getCorrelationName();

    if ( correlationName == null )
    {
        // no correlation name on the target table; nothing to rewrite
        return;
    }

    TableName correlationNameNode = makeTableName( null, correlationName );

    for (ResultColumn column : rcl)
    {
        ValueNode expression = column.getExpression();

        if ( (expression != null) && (expression instanceof ColumnReference) )
        {
            ColumnReference reference = (ColumnReference) expression;

            reference.setQualifiedTableName( correlationNameNode );
        }
    }
}
}
googleapis/google-api-java-client-services
36,840
clients/google-api-services-iamcredentials/v1/1.28.0/com/google/api/services/iamcredentials/v1/IAMCredentials.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.iamcredentials.v1; /** * Service definition for IAMCredentials (v1). * * <p> * Creates short-lived, limited-privilege credentials for IAM service accounts. * </p> * * <p> * For more information about this service, see the * <a href="https://cloud.google.com/iam/docs/creating-short-lived-service-account-credentials" target="_blank">API Documentation</a> * </p> * * <p> * This service uses {@link IAMCredentialsRequestInitializer} to initialize global parameters via its * {@link Builder}. * </p> * * @since 1.3 * @author Google, Inc. */ @SuppressWarnings("javadoc") public class IAMCredentials extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient { // Note: Leave this static initializer at the top of the file. static { com.google.api.client.util.Preconditions.checkState( com.google.api.client.googleapis.GoogleUtils.MAJOR_VERSION == 1 && com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION >= 15, "You are currently running with version %s of google-api-client. " + "You need at least version 1.15 of google-api-client to run version " + "1.28.0 of the IAM Service Account Credentials API library.", com.google.api.client.googleapis.GoogleUtils.VERSION); } /** * The default encoded root URL of the service. 
This is determined when the library is generated * and normally should not be changed. * * @since 1.7 */ public static final String DEFAULT_ROOT_URL = "https://iamcredentials.googleapis.com/"; /** * The default encoded service path of the service. This is determined when the library is * generated and normally should not be changed. * * @since 1.7 */ public static final String DEFAULT_SERVICE_PATH = ""; /** * The default encoded batch path of the service. This is determined when the library is * generated and normally should not be changed. * * @since 1.23 */ public static final String DEFAULT_BATCH_PATH = "batch"; /** * The default encoded base URL of the service. This is determined when the library is generated * and normally should not be changed. */ public static final String DEFAULT_BASE_URL = DEFAULT_ROOT_URL + DEFAULT_SERVICE_PATH; /** * Constructor. * * <p> * Use {@link Builder} if you need to specify any of the optional parameters. * </p> * * @param transport HTTP transport, which should normally be: * <ul> * <li>Google App Engine: * {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li> * <li>Android: {@code newCompatibleTransport} from * {@code com.google.api.client.extensions.android.http.AndroidHttp}</li> * <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()} * </li> * </ul> * @param jsonFactory JSON factory, which may be: * <ul> * <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li> * <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li> * <li>Android Honeycomb or higher: * {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li> * </ul> * @param httpRequestInitializer HTTP request initializer or {@code null} for none * @since 1.7 */ public IAMCredentials(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory, com.google.api.client.http.HttpRequestInitializer 
httpRequestInitializer) { this(new Builder(transport, jsonFactory, httpRequestInitializer)); } /** * @param builder builder */ IAMCredentials(Builder builder) { super(builder); } @Override protected void initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest<?> httpClientRequest) throws java.io.IOException { super.initialize(httpClientRequest); } /** * An accessor for creating requests from the Projects collection. * * <p>The typical use is:</p> * <pre> * {@code IAMCredentials iamcredentials = new IAMCredentials(...);} * {@code IAMCredentials.Projects.List request = iamcredentials.projects().list(parameters ...)} * </pre> * * @return the resource collection */ public Projects projects() { return new Projects(); } /** * The "projects" collection of methods. */ public class Projects { /** * An accessor for creating requests from the ServiceAccounts collection. * * <p>The typical use is:</p> * <pre> * {@code IAMCredentials iamcredentials = new IAMCredentials(...);} * {@code IAMCredentials.ServiceAccounts.List request = iamcredentials.serviceAccounts().list(parameters ...)} * </pre> * * @return the resource collection */ public ServiceAccounts serviceAccounts() { return new ServiceAccounts(); } /** * The "serviceAccounts" collection of methods. */ public class ServiceAccounts { /** * Generates an OAuth 2.0 access token for a service account. * * Create a request for the method "serviceAccounts.generateAccessToken". * * This request holds the parameters needed by the iamcredentials server. After setting any * optional parameters, call the {@link GenerateAccessToken#execute()} method to invoke the remote * operation. * * @param name Required. The resource name of the service account for which the credentials are requested, in the * following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` * wildcard character is required; replacing it with a project ID is invalid. 
* @param content the {@link com.google.api.services.iamcredentials.v1.model.GenerateAccessTokenRequest} * @return the request */ public GenerateAccessToken generateAccessToken(java.lang.String name, com.google.api.services.iamcredentials.v1.model.GenerateAccessTokenRequest content) throws java.io.IOException { GenerateAccessToken result = new GenerateAccessToken(name, content); initialize(result); return result; } public class GenerateAccessToken extends IAMCredentialsRequest<com.google.api.services.iamcredentials.v1.model.GenerateAccessTokenResponse> { private static final String REST_PATH = "v1/{+name}:generateAccessToken"; private final java.util.regex.Pattern NAME_PATTERN = java.util.regex.Pattern.compile("^projects/[^/]+/serviceAccounts/[^/]+$"); /** * Generates an OAuth 2.0 access token for a service account. * * Create a request for the method "serviceAccounts.generateAccessToken". * * This request holds the parameters needed by the the iamcredentials server. After setting any * optional parameters, call the {@link GenerateAccessToken#execute()} method to invoke the remote * operation. <p> {@link GenerateAccessToken#initialize(com.google.api.client.googleapis.services. * AbstractGoogleClientRequest)} must be called to initialize this instance immediately after * invoking the constructor. </p> * * @param name Required. The resource name of the service account for which the credentials are requested, in the * following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` * wildcard character is required; replacing it with a project ID is invalid. 
* @param content the {@link com.google.api.services.iamcredentials.v1.model.GenerateAccessTokenRequest} * @since 1.13 */ protected GenerateAccessToken(java.lang.String name, com.google.api.services.iamcredentials.v1.model.GenerateAccessTokenRequest content) { super(IAMCredentials.this, "POST", REST_PATH, content, com.google.api.services.iamcredentials.v1.model.GenerateAccessTokenResponse.class); this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified."); if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/serviceAccounts/[^/]+$"); } } @Override public GenerateAccessToken set$Xgafv(java.lang.String $Xgafv) { return (GenerateAccessToken) super.set$Xgafv($Xgafv); } @Override public GenerateAccessToken setAccessToken(java.lang.String accessToken) { return (GenerateAccessToken) super.setAccessToken(accessToken); } @Override public GenerateAccessToken setAlt(java.lang.String alt) { return (GenerateAccessToken) super.setAlt(alt); } @Override public GenerateAccessToken setCallback(java.lang.String callback) { return (GenerateAccessToken) super.setCallback(callback); } @Override public GenerateAccessToken setFields(java.lang.String fields) { return (GenerateAccessToken) super.setFields(fields); } @Override public GenerateAccessToken setKey(java.lang.String key) { return (GenerateAccessToken) super.setKey(key); } @Override public GenerateAccessToken setOauthToken(java.lang.String oauthToken) { return (GenerateAccessToken) super.setOauthToken(oauthToken); } @Override public GenerateAccessToken setPrettyPrint(java.lang.Boolean prettyPrint) { return (GenerateAccessToken) super.setPrettyPrint(prettyPrint); } @Override public GenerateAccessToken setQuotaUser(java.lang.String quotaUser) { return (GenerateAccessToken) super.setQuotaUser(quotaUser); } @Override public 
GenerateAccessToken setUploadType(java.lang.String uploadType) { return (GenerateAccessToken) super.setUploadType(uploadType); } @Override public GenerateAccessToken setUploadProtocol(java.lang.String uploadProtocol) { return (GenerateAccessToken) super.setUploadProtocol(uploadProtocol); } /** * Required. The resource name of the service account for which the credentials are * requested, in the following format: * `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is * required; replacing it with a project ID is invalid. */ @com.google.api.client.util.Key private java.lang.String name; /** Required. The resource name of the service account for which the credentials are requested, in the following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is required; replacing it with a project ID is invalid. */ public java.lang.String getName() { return name; } /** * Required. The resource name of the service account for which the credentials are * requested, in the following format: * `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is * required; replacing it with a project ID is invalid. */ public GenerateAccessToken setName(java.lang.String name) { if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/serviceAccounts/[^/]+$"); } this.name = name; return this; } @Override public GenerateAccessToken set(String parameterName, Object value) { return (GenerateAccessToken) super.set(parameterName, value); } } /** * Generates an OpenID Connect ID token for a service account. * * Create a request for the method "serviceAccounts.generateIdToken". * * This request holds the parameters needed by the iamcredentials server. 
After setting any * optional parameters, call the {@link GenerateIdToken#execute()} method to invoke the remote * operation. * * @param name Required. The resource name of the service account for which the credentials are requested, in the * following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` * wildcard character is required; replacing it with a project ID is invalid. * @param content the {@link com.google.api.services.iamcredentials.v1.model.GenerateIdTokenRequest} * @return the request */ public GenerateIdToken generateIdToken(java.lang.String name, com.google.api.services.iamcredentials.v1.model.GenerateIdTokenRequest content) throws java.io.IOException { GenerateIdToken result = new GenerateIdToken(name, content); initialize(result); return result; } public class GenerateIdToken extends IAMCredentialsRequest<com.google.api.services.iamcredentials.v1.model.GenerateIdTokenResponse> { private static final String REST_PATH = "v1/{+name}:generateIdToken"; private final java.util.regex.Pattern NAME_PATTERN = java.util.regex.Pattern.compile("^projects/[^/]+/serviceAccounts/[^/]+$"); /** * Generates an OpenID Connect ID token for a service account. * * Create a request for the method "serviceAccounts.generateIdToken". * * This request holds the parameters needed by the the iamcredentials server. After setting any * optional parameters, call the {@link GenerateIdToken#execute()} method to invoke the remote * operation. <p> {@link GenerateIdToken#initialize(com.google.api.client.googleapis.services.Abst * ractGoogleClientRequest)} must be called to initialize this instance immediately after invoking * the constructor. </p> * * @param name Required. The resource name of the service account for which the credentials are requested, in the * following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` * wildcard character is required; replacing it with a project ID is invalid. 
* @param content the {@link com.google.api.services.iamcredentials.v1.model.GenerateIdTokenRequest} * @since 1.13 */ protected GenerateIdToken(java.lang.String name, com.google.api.services.iamcredentials.v1.model.GenerateIdTokenRequest content) { super(IAMCredentials.this, "POST", REST_PATH, content, com.google.api.services.iamcredentials.v1.model.GenerateIdTokenResponse.class); this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified."); if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/serviceAccounts/[^/]+$"); } } @Override public GenerateIdToken set$Xgafv(java.lang.String $Xgafv) { return (GenerateIdToken) super.set$Xgafv($Xgafv); } @Override public GenerateIdToken setAccessToken(java.lang.String accessToken) { return (GenerateIdToken) super.setAccessToken(accessToken); } @Override public GenerateIdToken setAlt(java.lang.String alt) { return (GenerateIdToken) super.setAlt(alt); } @Override public GenerateIdToken setCallback(java.lang.String callback) { return (GenerateIdToken) super.setCallback(callback); } @Override public GenerateIdToken setFields(java.lang.String fields) { return (GenerateIdToken) super.setFields(fields); } @Override public GenerateIdToken setKey(java.lang.String key) { return (GenerateIdToken) super.setKey(key); } @Override public GenerateIdToken setOauthToken(java.lang.String oauthToken) { return (GenerateIdToken) super.setOauthToken(oauthToken); } @Override public GenerateIdToken setPrettyPrint(java.lang.Boolean prettyPrint) { return (GenerateIdToken) super.setPrettyPrint(prettyPrint); } @Override public GenerateIdToken setQuotaUser(java.lang.String quotaUser) { return (GenerateIdToken) super.setQuotaUser(quotaUser); } @Override public GenerateIdToken setUploadType(java.lang.String uploadType) { return (GenerateIdToken) 
super.setUploadType(uploadType); } @Override public GenerateIdToken setUploadProtocol(java.lang.String uploadProtocol) { return (GenerateIdToken) super.setUploadProtocol(uploadProtocol); } /** * Required. The resource name of the service account for which the credentials are * requested, in the following format: * `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is * required; replacing it with a project ID is invalid. */ @com.google.api.client.util.Key private java.lang.String name; /** Required. The resource name of the service account for which the credentials are requested, in the following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is required; replacing it with a project ID is invalid. */ public java.lang.String getName() { return name; } /** * Required. The resource name of the service account for which the credentials are * requested, in the following format: * `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is * required; replacing it with a project ID is invalid. */ public GenerateIdToken setName(java.lang.String name) { if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/serviceAccounts/[^/]+$"); } this.name = name; return this; } @Override public GenerateIdToken set(String parameterName, Object value) { return (GenerateIdToken) super.set(parameterName, value); } } /** * Signs a blob using a service account's system-managed private key. * * Create a request for the method "serviceAccounts.signBlob". * * This request holds the parameters needed by the iamcredentials server. After setting any * optional parameters, call the {@link SignBlob#execute()} method to invoke the remote operation. * * @param name Required. 
The resource name of the service account for which the credentials are requested, in the * following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` * wildcard character is required; replacing it with a project ID is invalid. * @param content the {@link com.google.api.services.iamcredentials.v1.model.SignBlobRequest} * @return the request */ public SignBlob signBlob(java.lang.String name, com.google.api.services.iamcredentials.v1.model.SignBlobRequest content) throws java.io.IOException { SignBlob result = new SignBlob(name, content); initialize(result); return result; } public class SignBlob extends IAMCredentialsRequest<com.google.api.services.iamcredentials.v1.model.SignBlobResponse> { private static final String REST_PATH = "v1/{+name}:signBlob"; private final java.util.regex.Pattern NAME_PATTERN = java.util.regex.Pattern.compile("^projects/[^/]+/serviceAccounts/[^/]+$"); /** * Signs a blob using a service account's system-managed private key. * * Create a request for the method "serviceAccounts.signBlob". * * This request holds the parameters needed by the the iamcredentials server. After setting any * optional parameters, call the {@link SignBlob#execute()} method to invoke the remote operation. * <p> {@link * SignBlob#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} * must be called to initialize this instance immediately after invoking the constructor. </p> * * @param name Required. The resource name of the service account for which the credentials are requested, in the * following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` * wildcard character is required; replacing it with a project ID is invalid. 
* @param content the {@link com.google.api.services.iamcredentials.v1.model.SignBlobRequest} * @since 1.13 */ protected SignBlob(java.lang.String name, com.google.api.services.iamcredentials.v1.model.SignBlobRequest content) { super(IAMCredentials.this, "POST", REST_PATH, content, com.google.api.services.iamcredentials.v1.model.SignBlobResponse.class); this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified."); if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/serviceAccounts/[^/]+$"); } } @Override public SignBlob set$Xgafv(java.lang.String $Xgafv) { return (SignBlob) super.set$Xgafv($Xgafv); } @Override public SignBlob setAccessToken(java.lang.String accessToken) { return (SignBlob) super.setAccessToken(accessToken); } @Override public SignBlob setAlt(java.lang.String alt) { return (SignBlob) super.setAlt(alt); } @Override public SignBlob setCallback(java.lang.String callback) { return (SignBlob) super.setCallback(callback); } @Override public SignBlob setFields(java.lang.String fields) { return (SignBlob) super.setFields(fields); } @Override public SignBlob setKey(java.lang.String key) { return (SignBlob) super.setKey(key); } @Override public SignBlob setOauthToken(java.lang.String oauthToken) { return (SignBlob) super.setOauthToken(oauthToken); } @Override public SignBlob setPrettyPrint(java.lang.Boolean prettyPrint) { return (SignBlob) super.setPrettyPrint(prettyPrint); } @Override public SignBlob setQuotaUser(java.lang.String quotaUser) { return (SignBlob) super.setQuotaUser(quotaUser); } @Override public SignBlob setUploadType(java.lang.String uploadType) { return (SignBlob) super.setUploadType(uploadType); } @Override public SignBlob setUploadProtocol(java.lang.String uploadProtocol) { return (SignBlob) super.setUploadProtocol(uploadProtocol); } /** * 
Required. The resource name of the service account for which the credentials are * requested, in the following format: * `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is * required; replacing it with a project ID is invalid. */ @com.google.api.client.util.Key private java.lang.String name; /** Required. The resource name of the service account for which the credentials are requested, in the following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is required; replacing it with a project ID is invalid. */ public java.lang.String getName() { return name; } /** * Required. The resource name of the service account for which the credentials are * requested, in the following format: * `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is * required; replacing it with a project ID is invalid. */ public SignBlob setName(java.lang.String name) { if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/serviceAccounts/[^/]+$"); } this.name = name; return this; } @Override public SignBlob set(String parameterName, Object value) { return (SignBlob) super.set(parameterName, value); } } /** * Signs a JWT using a service account's system-managed private key. * * Create a request for the method "serviceAccounts.signJwt". * * This request holds the parameters needed by the iamcredentials server. After setting any * optional parameters, call the {@link SignJwt#execute()} method to invoke the remote operation. * * @param name Required. The resource name of the service account for which the credentials are requested, in the * following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` * wildcard character is required; replacing it with a project ID is invalid. 
* @param content the {@link com.google.api.services.iamcredentials.v1.model.SignJwtRequest} * @return the request */ public SignJwt signJwt(java.lang.String name, com.google.api.services.iamcredentials.v1.model.SignJwtRequest content) throws java.io.IOException { SignJwt result = new SignJwt(name, content); initialize(result); return result; } public class SignJwt extends IAMCredentialsRequest<com.google.api.services.iamcredentials.v1.model.SignJwtResponse> { private static final String REST_PATH = "v1/{+name}:signJwt"; private final java.util.regex.Pattern NAME_PATTERN = java.util.regex.Pattern.compile("^projects/[^/]+/serviceAccounts/[^/]+$"); /** * Signs a JWT using a service account's system-managed private key. * * Create a request for the method "serviceAccounts.signJwt". * * This request holds the parameters needed by the the iamcredentials server. After setting any * optional parameters, call the {@link SignJwt#execute()} method to invoke the remote operation. * <p> {@link * SignJwt#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must * be called to initialize this instance immediately after invoking the constructor. </p> * * @param name Required. The resource name of the service account for which the credentials are requested, in the * following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` * wildcard character is required; replacing it with a project ID is invalid. 
* @param content the {@link com.google.api.services.iamcredentials.v1.model.SignJwtRequest} * @since 1.13 */ protected SignJwt(java.lang.String name, com.google.api.services.iamcredentials.v1.model.SignJwtRequest content) { super(IAMCredentials.this, "POST", REST_PATH, content, com.google.api.services.iamcredentials.v1.model.SignJwtResponse.class); this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified."); if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/serviceAccounts/[^/]+$"); } } @Override public SignJwt set$Xgafv(java.lang.String $Xgafv) { return (SignJwt) super.set$Xgafv($Xgafv); } @Override public SignJwt setAccessToken(java.lang.String accessToken) { return (SignJwt) super.setAccessToken(accessToken); } @Override public SignJwt setAlt(java.lang.String alt) { return (SignJwt) super.setAlt(alt); } @Override public SignJwt setCallback(java.lang.String callback) { return (SignJwt) super.setCallback(callback); } @Override public SignJwt setFields(java.lang.String fields) { return (SignJwt) super.setFields(fields); } @Override public SignJwt setKey(java.lang.String key) { return (SignJwt) super.setKey(key); } @Override public SignJwt setOauthToken(java.lang.String oauthToken) { return (SignJwt) super.setOauthToken(oauthToken); } @Override public SignJwt setPrettyPrint(java.lang.Boolean prettyPrint) { return (SignJwt) super.setPrettyPrint(prettyPrint); } @Override public SignJwt setQuotaUser(java.lang.String quotaUser) { return (SignJwt) super.setQuotaUser(quotaUser); } @Override public SignJwt setUploadType(java.lang.String uploadType) { return (SignJwt) super.setUploadType(uploadType); } @Override public SignJwt setUploadProtocol(java.lang.String uploadProtocol) { return (SignJwt) super.setUploadProtocol(uploadProtocol); } /** * Required. 
The resource name of the service account for which the credentials are * requested, in the following format: * `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is * required; replacing it with a project ID is invalid. */ @com.google.api.client.util.Key private java.lang.String name; /** Required. The resource name of the service account for which the credentials are requested, in the following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is required; replacing it with a project ID is invalid. */ public java.lang.String getName() { return name; } /** * Required. The resource name of the service account for which the credentials are * requested, in the following format: * `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is * required; replacing it with a project ID is invalid. */ public SignJwt setName(java.lang.String name) { if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/serviceAccounts/[^/]+$"); } this.name = name; return this; } @Override public SignJwt set(String parameterName, Object value) { return (SignJwt) super.set(parameterName, value); } } } } /** * Builder for {@link IAMCredentials}. * * <p> * Implementation is not thread-safe. * </p> * * @since 1.3.0 */ public static final class Builder extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient.Builder { /** * Returns an instance of a new builder. 
* * @param transport HTTP transport, which should normally be: * <ul> * <li>Google App Engine: * {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li> * <li>Android: {@code newCompatibleTransport} from * {@code com.google.api.client.extensions.android.http.AndroidHttp}</li> * <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()} * </li> * </ul> * @param jsonFactory JSON factory, which may be: * <ul> * <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li> * <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li> * <li>Android Honeycomb or higher: * {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li> * </ul> * @param httpRequestInitializer HTTP request initializer or {@code null} for none * @since 1.7 */ public Builder(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory, com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) { super( transport, jsonFactory, DEFAULT_ROOT_URL, DEFAULT_SERVICE_PATH, httpRequestInitializer, false); setBatchPath(DEFAULT_BATCH_PATH); } /** Builds a new instance of {@link IAMCredentials}. 
*/ @Override public IAMCredentials build() { return new IAMCredentials(this); } @Override public Builder setRootUrl(String rootUrl) { return (Builder) super.setRootUrl(rootUrl); } @Override public Builder setServicePath(String servicePath) { return (Builder) super.setServicePath(servicePath); } @Override public Builder setBatchPath(String batchPath) { return (Builder) super.setBatchPath(batchPath); } @Override public Builder setHttpRequestInitializer(com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) { return (Builder) super.setHttpRequestInitializer(httpRequestInitializer); } @Override public Builder setApplicationName(String applicationName) { return (Builder) super.setApplicationName(applicationName); } @Override public Builder setSuppressPatternChecks(boolean suppressPatternChecks) { return (Builder) super.setSuppressPatternChecks(suppressPatternChecks); } @Override public Builder setSuppressRequiredParameterChecks(boolean suppressRequiredParameterChecks) { return (Builder) super.setSuppressRequiredParameterChecks(suppressRequiredParameterChecks); } @Override public Builder setSuppressAllChecks(boolean suppressAllChecks) { return (Builder) super.setSuppressAllChecks(suppressAllChecks); } /** * Set the {@link IAMCredentialsRequestInitializer}. * * @since 1.12 */ public Builder setIAMCredentialsRequestInitializer( IAMCredentialsRequestInitializer iamcredentialsRequestInitializer) { return (Builder) super.setGoogleClientRequestInitializer(iamcredentialsRequestInitializer); } @Override public Builder setGoogleClientRequestInitializer( com.google.api.client.googleapis.services.GoogleClientRequestInitializer googleClientRequestInitializer) { return (Builder) super.setGoogleClientRequestInitializer(googleClientRequestInitializer); } } }
// NOTE(review): the three commented lines below are stray dataset-metadata rows
// (repo id, file size, file path) that were accidentally concatenated into this
// generated source, followed by a second duplicate copy of this file. A duplicate
// top-level IAMCredentials class in the same compilation unit will not compile —
// this file should be restored by regenerating it from the API discovery document
// rather than by hand-editing. Metadata preserved below for traceability:
// googleapis/google-api-java-client-services
// 36,840
// clients/google-api-services-iamcredentials/v1/1.29.2/com/google/api/services/iamcredentials/v1/IAMCredentials.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.iamcredentials.v1; /** * Service definition for IAMCredentials (v1). * * <p> * Creates short-lived, limited-privilege credentials for IAM service accounts. * </p> * * <p> * For more information about this service, see the * <a href="https://cloud.google.com/iam/docs/creating-short-lived-service-account-credentials" target="_blank">API Documentation</a> * </p> * * <p> * This service uses {@link IAMCredentialsRequestInitializer} to initialize global parameters via its * {@link Builder}. * </p> * * @since 1.3 * @author Google, Inc. */ @SuppressWarnings("javadoc") public class IAMCredentials extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient { // Note: Leave this static initializer at the top of the file. static { com.google.api.client.util.Preconditions.checkState( com.google.api.client.googleapis.GoogleUtils.MAJOR_VERSION == 1 && com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION >= 15, "You are currently running with version %s of google-api-client. " + "You need at least version 1.15 of google-api-client to run version " + "1.29.2 of the IAM Service Account Credentials API library.", com.google.api.client.googleapis.GoogleUtils.VERSION); } /** * The default encoded root URL of the service. 
This is determined when the library is generated * and normally should not be changed. * * @since 1.7 */ public static final String DEFAULT_ROOT_URL = "https://iamcredentials.googleapis.com/"; /** * The default encoded service path of the service. This is determined when the library is * generated and normally should not be changed. * * @since 1.7 */ public static final String DEFAULT_SERVICE_PATH = ""; /** * The default encoded batch path of the service. This is determined when the library is * generated and normally should not be changed. * * @since 1.23 */ public static final String DEFAULT_BATCH_PATH = "batch"; /** * The default encoded base URL of the service. This is determined when the library is generated * and normally should not be changed. */ public static final String DEFAULT_BASE_URL = DEFAULT_ROOT_URL + DEFAULT_SERVICE_PATH; /** * Constructor. * * <p> * Use {@link Builder} if you need to specify any of the optional parameters. * </p> * * @param transport HTTP transport, which should normally be: * <ul> * <li>Google App Engine: * {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li> * <li>Android: {@code newCompatibleTransport} from * {@code com.google.api.client.extensions.android.http.AndroidHttp}</li> * <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()} * </li> * </ul> * @param jsonFactory JSON factory, which may be: * <ul> * <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li> * <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li> * <li>Android Honeycomb or higher: * {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li> * </ul> * @param httpRequestInitializer HTTP request initializer or {@code null} for none * @since 1.7 */ public IAMCredentials(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory, com.google.api.client.http.HttpRequestInitializer 
httpRequestInitializer) { this(new Builder(transport, jsonFactory, httpRequestInitializer)); } /** * @param builder builder */ IAMCredentials(Builder builder) { super(builder); } @Override protected void initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest<?> httpClientRequest) throws java.io.IOException { super.initialize(httpClientRequest); } /** * An accessor for creating requests from the Projects collection. * * <p>The typical use is:</p> * <pre> * {@code IAMCredentials iamcredentials = new IAMCredentials(...);} * {@code IAMCredentials.Projects.List request = iamcredentials.projects().list(parameters ...)} * </pre> * * @return the resource collection */ public Projects projects() { return new Projects(); } /** * The "projects" collection of methods. */ public class Projects { /** * An accessor for creating requests from the ServiceAccounts collection. * * <p>The typical use is:</p> * <pre> * {@code IAMCredentials iamcredentials = new IAMCredentials(...);} * {@code IAMCredentials.ServiceAccounts.List request = iamcredentials.serviceAccounts().list(parameters ...)} * </pre> * * @return the resource collection */ public ServiceAccounts serviceAccounts() { return new ServiceAccounts(); } /** * The "serviceAccounts" collection of methods. */ public class ServiceAccounts { /** * Generates an OAuth 2.0 access token for a service account. * * Create a request for the method "serviceAccounts.generateAccessToken". * * This request holds the parameters needed by the iamcredentials server. After setting any * optional parameters, call the {@link GenerateAccessToken#execute()} method to invoke the remote * operation. * * @param name Required. The resource name of the service account for which the credentials are requested, in the * following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` * wildcard character is required; replacing it with a project ID is invalid. 
* @param content the {@link com.google.api.services.iamcredentials.v1.model.GenerateAccessTokenRequest} * @return the request */ public GenerateAccessToken generateAccessToken(java.lang.String name, com.google.api.services.iamcredentials.v1.model.GenerateAccessTokenRequest content) throws java.io.IOException { GenerateAccessToken result = new GenerateAccessToken(name, content); initialize(result); return result; } public class GenerateAccessToken extends IAMCredentialsRequest<com.google.api.services.iamcredentials.v1.model.GenerateAccessTokenResponse> { private static final String REST_PATH = "v1/{+name}:generateAccessToken"; private final java.util.regex.Pattern NAME_PATTERN = java.util.regex.Pattern.compile("^projects/[^/]+/serviceAccounts/[^/]+$"); /** * Generates an OAuth 2.0 access token for a service account. * * Create a request for the method "serviceAccounts.generateAccessToken". * * This request holds the parameters needed by the the iamcredentials server. After setting any * optional parameters, call the {@link GenerateAccessToken#execute()} method to invoke the remote * operation. <p> {@link GenerateAccessToken#initialize(com.google.api.client.googleapis.services. * AbstractGoogleClientRequest)} must be called to initialize this instance immediately after * invoking the constructor. </p> * * @param name Required. The resource name of the service account for which the credentials are requested, in the * following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` * wildcard character is required; replacing it with a project ID is invalid. 
* @param content the {@link com.google.api.services.iamcredentials.v1.model.GenerateAccessTokenRequest} * @since 1.13 */ protected GenerateAccessToken(java.lang.String name, com.google.api.services.iamcredentials.v1.model.GenerateAccessTokenRequest content) { super(IAMCredentials.this, "POST", REST_PATH, content, com.google.api.services.iamcredentials.v1.model.GenerateAccessTokenResponse.class); this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified."); if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/serviceAccounts/[^/]+$"); } } @Override public GenerateAccessToken set$Xgafv(java.lang.String $Xgafv) { return (GenerateAccessToken) super.set$Xgafv($Xgafv); } @Override public GenerateAccessToken setAccessToken(java.lang.String accessToken) { return (GenerateAccessToken) super.setAccessToken(accessToken); } @Override public GenerateAccessToken setAlt(java.lang.String alt) { return (GenerateAccessToken) super.setAlt(alt); } @Override public GenerateAccessToken setCallback(java.lang.String callback) { return (GenerateAccessToken) super.setCallback(callback); } @Override public GenerateAccessToken setFields(java.lang.String fields) { return (GenerateAccessToken) super.setFields(fields); } @Override public GenerateAccessToken setKey(java.lang.String key) { return (GenerateAccessToken) super.setKey(key); } @Override public GenerateAccessToken setOauthToken(java.lang.String oauthToken) { return (GenerateAccessToken) super.setOauthToken(oauthToken); } @Override public GenerateAccessToken setPrettyPrint(java.lang.Boolean prettyPrint) { return (GenerateAccessToken) super.setPrettyPrint(prettyPrint); } @Override public GenerateAccessToken setQuotaUser(java.lang.String quotaUser) { return (GenerateAccessToken) super.setQuotaUser(quotaUser); } @Override public 
GenerateAccessToken setUploadType(java.lang.String uploadType) { return (GenerateAccessToken) super.setUploadType(uploadType); } @Override public GenerateAccessToken setUploadProtocol(java.lang.String uploadProtocol) { return (GenerateAccessToken) super.setUploadProtocol(uploadProtocol); } /** * Required. The resource name of the service account for which the credentials are * requested, in the following format: * `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is * required; replacing it with a project ID is invalid. */ @com.google.api.client.util.Key private java.lang.String name; /** Required. The resource name of the service account for which the credentials are requested, in the following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is required; replacing it with a project ID is invalid. */ public java.lang.String getName() { return name; } /** * Required. The resource name of the service account for which the credentials are * requested, in the following format: * `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is * required; replacing it with a project ID is invalid. */ public GenerateAccessToken setName(java.lang.String name) { if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/serviceAccounts/[^/]+$"); } this.name = name; return this; } @Override public GenerateAccessToken set(String parameterName, Object value) { return (GenerateAccessToken) super.set(parameterName, value); } } /** * Generates an OpenID Connect ID token for a service account. * * Create a request for the method "serviceAccounts.generateIdToken". * * This request holds the parameters needed by the iamcredentials server. 
After setting any * optional parameters, call the {@link GenerateIdToken#execute()} method to invoke the remote * operation. * * @param name Required. The resource name of the service account for which the credentials are requested, in the * following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` * wildcard character is required; replacing it with a project ID is invalid. * @param content the {@link com.google.api.services.iamcredentials.v1.model.GenerateIdTokenRequest} * @return the request */ public GenerateIdToken generateIdToken(java.lang.String name, com.google.api.services.iamcredentials.v1.model.GenerateIdTokenRequest content) throws java.io.IOException { GenerateIdToken result = new GenerateIdToken(name, content); initialize(result); return result; } public class GenerateIdToken extends IAMCredentialsRequest<com.google.api.services.iamcredentials.v1.model.GenerateIdTokenResponse> { private static final String REST_PATH = "v1/{+name}:generateIdToken"; private final java.util.regex.Pattern NAME_PATTERN = java.util.regex.Pattern.compile("^projects/[^/]+/serviceAccounts/[^/]+$"); /** * Generates an OpenID Connect ID token for a service account. * * Create a request for the method "serviceAccounts.generateIdToken". * * This request holds the parameters needed by the the iamcredentials server. After setting any * optional parameters, call the {@link GenerateIdToken#execute()} method to invoke the remote * operation. <p> {@link GenerateIdToken#initialize(com.google.api.client.googleapis.services.Abst * ractGoogleClientRequest)} must be called to initialize this instance immediately after invoking * the constructor. </p> * * @param name Required. The resource name of the service account for which the credentials are requested, in the * following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` * wildcard character is required; replacing it with a project ID is invalid. 
* @param content the {@link com.google.api.services.iamcredentials.v1.model.GenerateIdTokenRequest} * @since 1.13 */ protected GenerateIdToken(java.lang.String name, com.google.api.services.iamcredentials.v1.model.GenerateIdTokenRequest content) { super(IAMCredentials.this, "POST", REST_PATH, content, com.google.api.services.iamcredentials.v1.model.GenerateIdTokenResponse.class); this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified."); if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/serviceAccounts/[^/]+$"); } } @Override public GenerateIdToken set$Xgafv(java.lang.String $Xgafv) { return (GenerateIdToken) super.set$Xgafv($Xgafv); } @Override public GenerateIdToken setAccessToken(java.lang.String accessToken) { return (GenerateIdToken) super.setAccessToken(accessToken); } @Override public GenerateIdToken setAlt(java.lang.String alt) { return (GenerateIdToken) super.setAlt(alt); } @Override public GenerateIdToken setCallback(java.lang.String callback) { return (GenerateIdToken) super.setCallback(callback); } @Override public GenerateIdToken setFields(java.lang.String fields) { return (GenerateIdToken) super.setFields(fields); } @Override public GenerateIdToken setKey(java.lang.String key) { return (GenerateIdToken) super.setKey(key); } @Override public GenerateIdToken setOauthToken(java.lang.String oauthToken) { return (GenerateIdToken) super.setOauthToken(oauthToken); } @Override public GenerateIdToken setPrettyPrint(java.lang.Boolean prettyPrint) { return (GenerateIdToken) super.setPrettyPrint(prettyPrint); } @Override public GenerateIdToken setQuotaUser(java.lang.String quotaUser) { return (GenerateIdToken) super.setQuotaUser(quotaUser); } @Override public GenerateIdToken setUploadType(java.lang.String uploadType) { return (GenerateIdToken) 
super.setUploadType(uploadType); } @Override public GenerateIdToken setUploadProtocol(java.lang.String uploadProtocol) { return (GenerateIdToken) super.setUploadProtocol(uploadProtocol); } /** * Required. The resource name of the service account for which the credentials are * requested, in the following format: * `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is * required; replacing it with a project ID is invalid. */ @com.google.api.client.util.Key private java.lang.String name; /** Required. The resource name of the service account for which the credentials are requested, in the following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is required; replacing it with a project ID is invalid. */ public java.lang.String getName() { return name; } /** * Required. The resource name of the service account for which the credentials are * requested, in the following format: * `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is * required; replacing it with a project ID is invalid. */ public GenerateIdToken setName(java.lang.String name) { if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/serviceAccounts/[^/]+$"); } this.name = name; return this; } @Override public GenerateIdToken set(String parameterName, Object value) { return (GenerateIdToken) super.set(parameterName, value); } } /** * Signs a blob using a service account's system-managed private key. * * Create a request for the method "serviceAccounts.signBlob". * * This request holds the parameters needed by the iamcredentials server. After setting any * optional parameters, call the {@link SignBlob#execute()} method to invoke the remote operation. * * @param name Required. 
The resource name of the service account for which the credentials are requested, in the
   *        following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-`
   *        wildcard character is required; replacing it with a project ID is invalid.
   * @param content the {@link com.google.api.services.iamcredentials.v1.model.SignBlobRequest}
   * @return the request
   */
  public SignBlob signBlob(java.lang.String name, com.google.api.services.iamcredentials.v1.model.SignBlobRequest content) throws java.io.IOException {
    SignBlob result = new SignBlob(name, content);
    initialize(result);
    return result;
  }

  public class SignBlob extends IAMCredentialsRequest<com.google.api.services.iamcredentials.v1.model.SignBlobResponse> {

    private static final String REST_PATH = "v1/{+name}:signBlob";

    private final java.util.regex.Pattern NAME_PATTERN =
        java.util.regex.Pattern.compile("^projects/[^/]+/serviceAccounts/[^/]+$");

    /**
     * Signs a blob using a service account's system-managed private key.
     *
     * Create a request for the method "serviceAccounts.signBlob".
     *
     * This request holds the parameters needed by the iamcredentials server. After setting any
     * optional parameters, call the {@link SignBlob#execute()} method to invoke the remote operation.
     * <p> {@link
     * SignBlob#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)}
     * must be called to initialize this instance immediately after invoking the constructor. </p>
     *
     * @param name Required. The resource name of the service account for which the credentials are requested, in the
     *        following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-`
     *        wildcard character is required; replacing it with a project ID is invalid.
* @param content the {@link com.google.api.services.iamcredentials.v1.model.SignBlobRequest} * @since 1.13 */ protected SignBlob(java.lang.String name, com.google.api.services.iamcredentials.v1.model.SignBlobRequest content) { super(IAMCredentials.this, "POST", REST_PATH, content, com.google.api.services.iamcredentials.v1.model.SignBlobResponse.class); this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified."); if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/serviceAccounts/[^/]+$"); } } @Override public SignBlob set$Xgafv(java.lang.String $Xgafv) { return (SignBlob) super.set$Xgafv($Xgafv); } @Override public SignBlob setAccessToken(java.lang.String accessToken) { return (SignBlob) super.setAccessToken(accessToken); } @Override public SignBlob setAlt(java.lang.String alt) { return (SignBlob) super.setAlt(alt); } @Override public SignBlob setCallback(java.lang.String callback) { return (SignBlob) super.setCallback(callback); } @Override public SignBlob setFields(java.lang.String fields) { return (SignBlob) super.setFields(fields); } @Override public SignBlob setKey(java.lang.String key) { return (SignBlob) super.setKey(key); } @Override public SignBlob setOauthToken(java.lang.String oauthToken) { return (SignBlob) super.setOauthToken(oauthToken); } @Override public SignBlob setPrettyPrint(java.lang.Boolean prettyPrint) { return (SignBlob) super.setPrettyPrint(prettyPrint); } @Override public SignBlob setQuotaUser(java.lang.String quotaUser) { return (SignBlob) super.setQuotaUser(quotaUser); } @Override public SignBlob setUploadType(java.lang.String uploadType) { return (SignBlob) super.setUploadType(uploadType); } @Override public SignBlob setUploadProtocol(java.lang.String uploadProtocol) { return (SignBlob) super.setUploadProtocol(uploadProtocol); } /** * 
Required. The resource name of the service account for which the credentials are * requested, in the following format: * `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is * required; replacing it with a project ID is invalid. */ @com.google.api.client.util.Key private java.lang.String name; /** Required. The resource name of the service account for which the credentials are requested, in the following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is required; replacing it with a project ID is invalid. */ public java.lang.String getName() { return name; } /** * Required. The resource name of the service account for which the credentials are * requested, in the following format: * `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is * required; replacing it with a project ID is invalid. */ public SignBlob setName(java.lang.String name) { if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/serviceAccounts/[^/]+$"); } this.name = name; return this; } @Override public SignBlob set(String parameterName, Object value) { return (SignBlob) super.set(parameterName, value); } } /** * Signs a JWT using a service account's system-managed private key. * * Create a request for the method "serviceAccounts.signJwt". * * This request holds the parameters needed by the iamcredentials server. After setting any * optional parameters, call the {@link SignJwt#execute()} method to invoke the remote operation. * * @param name Required. The resource name of the service account for which the credentials are requested, in the * following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` * wildcard character is required; replacing it with a project ID is invalid. 
* @param content the {@link com.google.api.services.iamcredentials.v1.model.SignJwtRequest}
   * @return the request
   */
  public SignJwt signJwt(java.lang.String name, com.google.api.services.iamcredentials.v1.model.SignJwtRequest content) throws java.io.IOException {
    SignJwt result = new SignJwt(name, content);
    initialize(result);
    return result;
  }

  public class SignJwt extends IAMCredentialsRequest<com.google.api.services.iamcredentials.v1.model.SignJwtResponse> {

    private static final String REST_PATH = "v1/{+name}:signJwt";

    private final java.util.regex.Pattern NAME_PATTERN =
        java.util.regex.Pattern.compile("^projects/[^/]+/serviceAccounts/[^/]+$");

    /**
     * Signs a JWT using a service account's system-managed private key.
     *
     * Create a request for the method "serviceAccounts.signJwt".
     *
     * This request holds the parameters needed by the iamcredentials server. After setting any
     * optional parameters, call the {@link SignJwt#execute()} method to invoke the remote operation.
     * <p> {@link
     * SignJwt#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must
     * be called to initialize this instance immediately after invoking the constructor. </p>
     *
     * @param name Required. The resource name of the service account for which the credentials are requested, in the
     *        following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-`
     *        wildcard character is required; replacing it with a project ID is invalid.
* @param content the {@link com.google.api.services.iamcredentials.v1.model.SignJwtRequest} * @since 1.13 */ protected SignJwt(java.lang.String name, com.google.api.services.iamcredentials.v1.model.SignJwtRequest content) { super(IAMCredentials.this, "POST", REST_PATH, content, com.google.api.services.iamcredentials.v1.model.SignJwtResponse.class); this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified."); if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/serviceAccounts/[^/]+$"); } } @Override public SignJwt set$Xgafv(java.lang.String $Xgafv) { return (SignJwt) super.set$Xgafv($Xgafv); } @Override public SignJwt setAccessToken(java.lang.String accessToken) { return (SignJwt) super.setAccessToken(accessToken); } @Override public SignJwt setAlt(java.lang.String alt) { return (SignJwt) super.setAlt(alt); } @Override public SignJwt setCallback(java.lang.String callback) { return (SignJwt) super.setCallback(callback); } @Override public SignJwt setFields(java.lang.String fields) { return (SignJwt) super.setFields(fields); } @Override public SignJwt setKey(java.lang.String key) { return (SignJwt) super.setKey(key); } @Override public SignJwt setOauthToken(java.lang.String oauthToken) { return (SignJwt) super.setOauthToken(oauthToken); } @Override public SignJwt setPrettyPrint(java.lang.Boolean prettyPrint) { return (SignJwt) super.setPrettyPrint(prettyPrint); } @Override public SignJwt setQuotaUser(java.lang.String quotaUser) { return (SignJwt) super.setQuotaUser(quotaUser); } @Override public SignJwt setUploadType(java.lang.String uploadType) { return (SignJwt) super.setUploadType(uploadType); } @Override public SignJwt setUploadProtocol(java.lang.String uploadProtocol) { return (SignJwt) super.setUploadProtocol(uploadProtocol); } /** * Required. 
The resource name of the service account for which the credentials are * requested, in the following format: * `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is * required; replacing it with a project ID is invalid. */ @com.google.api.client.util.Key private java.lang.String name; /** Required. The resource name of the service account for which the credentials are requested, in the following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is required; replacing it with a project ID is invalid. */ public java.lang.String getName() { return name; } /** * Required. The resource name of the service account for which the credentials are * requested, in the following format: * `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is * required; replacing it with a project ID is invalid. */ public SignJwt setName(java.lang.String name) { if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/serviceAccounts/[^/]+$"); } this.name = name; return this; } @Override public SignJwt set(String parameterName, Object value) { return (SignJwt) super.set(parameterName, value); } } } } /** * Builder for {@link IAMCredentials}. * * <p> * Implementation is not thread-safe. * </p> * * @since 1.3.0 */ public static final class Builder extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient.Builder { /** * Returns an instance of a new builder. 
* * @param transport HTTP transport, which should normally be: * <ul> * <li>Google App Engine: * {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li> * <li>Android: {@code newCompatibleTransport} from * {@code com.google.api.client.extensions.android.http.AndroidHttp}</li> * <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()} * </li> * </ul> * @param jsonFactory JSON factory, which may be: * <ul> * <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li> * <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li> * <li>Android Honeycomb or higher: * {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li> * </ul> * @param httpRequestInitializer HTTP request initializer or {@code null} for none * @since 1.7 */ public Builder(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory, com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) { super( transport, jsonFactory, DEFAULT_ROOT_URL, DEFAULT_SERVICE_PATH, httpRequestInitializer, false); setBatchPath(DEFAULT_BATCH_PATH); } /** Builds a new instance of {@link IAMCredentials}. 
*/ @Override public IAMCredentials build() { return new IAMCredentials(this); } @Override public Builder setRootUrl(String rootUrl) { return (Builder) super.setRootUrl(rootUrl); } @Override public Builder setServicePath(String servicePath) { return (Builder) super.setServicePath(servicePath); } @Override public Builder setBatchPath(String batchPath) { return (Builder) super.setBatchPath(batchPath); } @Override public Builder setHttpRequestInitializer(com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) { return (Builder) super.setHttpRequestInitializer(httpRequestInitializer); } @Override public Builder setApplicationName(String applicationName) { return (Builder) super.setApplicationName(applicationName); } @Override public Builder setSuppressPatternChecks(boolean suppressPatternChecks) { return (Builder) super.setSuppressPatternChecks(suppressPatternChecks); } @Override public Builder setSuppressRequiredParameterChecks(boolean suppressRequiredParameterChecks) { return (Builder) super.setSuppressRequiredParameterChecks(suppressRequiredParameterChecks); } @Override public Builder setSuppressAllChecks(boolean suppressAllChecks) { return (Builder) super.setSuppressAllChecks(suppressAllChecks); } /** * Set the {@link IAMCredentialsRequestInitializer}. * * @since 1.12 */ public Builder setIAMCredentialsRequestInitializer( IAMCredentialsRequestInitializer iamcredentialsRequestInitializer) { return (Builder) super.setGoogleClientRequestInitializer(iamcredentialsRequestInitializer); } @Override public Builder setGoogleClientRequestInitializer( com.google.api.client.googleapis.services.GoogleClientRequestInitializer googleClientRequestInitializer) { return (Builder) super.setGoogleClientRequestInitializer(googleClientRequestInitializer); } } }
// ---- concatenation artifact: the content below belongs to a separate generated file ----
// repo: googleapis/google-cloud-java (36,603 bytes)
// path: java-alloydb-connectors/proto-google-cloud-alloydb-connectors-v1/src/main/java/com/google/cloud/alloydb/connectors/v1/MetadataExchangeRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/alloydb/connectors/v1/resources.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.alloydb.connectors.v1; /** * * * <pre> * Message used by AlloyDB connectors to exchange client and connection metadata * with the server after a successful TLS handshake. This metadata includes an * IAM token, which is used to authenticate users based on their IAM identity. * The sole purpose of this message is for the use of AlloyDB connectors. * Clients should not rely on this message directly as there can be breaking * changes in the future. * </pre> * * Protobuf type {@code google.cloud.alloydb.connectors.v1.MetadataExchangeRequest} */ public final class MetadataExchangeRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.alloydb.connectors.v1.MetadataExchangeRequest) MetadataExchangeRequestOrBuilder { private static final long serialVersionUID = 0L; // Use MetadataExchangeRequest.newBuilder() to construct. 
private MetadataExchangeRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private MetadataExchangeRequest() { userAgent_ = ""; authType_ = 0; oauth2Token_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new MetadataExchangeRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.alloydb.connectors.v1.ResourcesProto .internal_static_google_cloud_alloydb_connectors_v1_MetadataExchangeRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.alloydb.connectors.v1.ResourcesProto .internal_static_google_cloud_alloydb_connectors_v1_MetadataExchangeRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest.class, com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest.Builder.class); } /** * * * <pre> * AuthType contains all supported authentication types. 
* </pre> * * Protobuf enum {@code google.cloud.alloydb.connectors.v1.MetadataExchangeRequest.AuthType} */ public enum AuthType implements com.google.protobuf.ProtocolMessageEnum { /** * * * <pre> * Authentication type is unspecified and DB_NATIVE is used by default * </pre> * * <code>AUTH_TYPE_UNSPECIFIED = 0;</code> */ AUTH_TYPE_UNSPECIFIED(0), /** * * * <pre> * Database native authentication (user/password) * </pre> * * <code>DB_NATIVE = 1;</code> */ DB_NATIVE(1), /** * * * <pre> * Automatic IAM authentication * </pre> * * <code>AUTO_IAM = 2;</code> */ AUTO_IAM(2), UNRECOGNIZED(-1), ; /** * * * <pre> * Authentication type is unspecified and DB_NATIVE is used by default * </pre> * * <code>AUTH_TYPE_UNSPECIFIED = 0;</code> */ public static final int AUTH_TYPE_UNSPECIFIED_VALUE = 0; /** * * * <pre> * Database native authentication (user/password) * </pre> * * <code>DB_NATIVE = 1;</code> */ public static final int DB_NATIVE_VALUE = 1; /** * * * <pre> * Automatic IAM authentication * </pre> * * <code>AUTO_IAM = 2;</code> */ public static final int AUTO_IAM_VALUE = 2; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static AuthType valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. 
*/ public static AuthType forNumber(int value) { switch (value) { case 0: return AUTH_TYPE_UNSPECIFIED; case 1: return DB_NATIVE; case 2: return AUTO_IAM; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<AuthType> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap<AuthType> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<AuthType>() { public AuthType findValueByNumber(int number) { return AuthType.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest.getDescriptor() .getEnumTypes() .get(0); } private static final AuthType[] VALUES = values(); public static AuthType valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private AuthType(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.cloud.alloydb.connectors.v1.MetadataExchangeRequest.AuthType) } public static final int USER_AGENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object userAgent_ = ""; /** * * * <pre> * Optional. Connector information. * </pre> * * <code>string user_agent = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The userAgent. 
*/ @java.lang.Override public java.lang.String getUserAgent() { java.lang.Object ref = userAgent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); userAgent_ = s; return s; } } /** * * * <pre> * Optional. Connector information. * </pre> * * <code>string user_agent = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for userAgent. */ @java.lang.Override public com.google.protobuf.ByteString getUserAgentBytes() { java.lang.Object ref = userAgent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); userAgent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int AUTH_TYPE_FIELD_NUMBER = 2; private int authType_ = 0; /** * * * <pre> * Authentication type. * </pre> * * <code>.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest.AuthType auth_type = 2; * </code> * * @return The enum numeric value on the wire for authType. */ @java.lang.Override public int getAuthTypeValue() { return authType_; } /** * * * <pre> * Authentication type. * </pre> * * <code>.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest.AuthType auth_type = 2; * </code> * * @return The authType. */ @java.lang.Override public com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest.AuthType getAuthType() { com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest.AuthType result = com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest.AuthType.forNumber( authType_); return result == null ? 
com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest.AuthType.UNRECOGNIZED
        : result;
  }

  public static final int OAUTH2_TOKEN_FIELD_NUMBER = 3;

  @SuppressWarnings("serial")
  private volatile java.lang.Object oauth2Token_ = "";

  /**
   *
   *
   * <pre>
   * IAM token used for both IAM user authentication and
   * `alloydb.instances.connect` permission check.
   * </pre>
   *
   * <code>string oauth2_token = 3;</code>
   *
   * @return The oauth2Token.
   */
  @java.lang.Override
  public java.lang.String getOauth2Token() {
    java.lang.Object ref = oauth2Token_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so later reads skip the UTF-8 conversion.
      oauth2Token_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * IAM token used for both IAM user authentication and
   * `alloydb.instances.connect` permission check.
   * </pre>
   *
   * <code>string oauth2_token = 3;</code>
   *
   * @return The bytes for oauth2Token.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getOauth2TokenBytes() {
    java.lang.Object ref = oauth2Token_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded ByteString so later reads skip the UTF-8 conversion.
      oauth2Token_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(userAgent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, userAgent_);
    }
    // Default-valued enums are not serialized, per proto3 wire rules.
    if (authType_
        != com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest.AuthType
            .AUTH_TYPE_UNSPECIFIED
            .getNumber()) {
      output.writeEnum(2,
authType_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(oauth2Token_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, oauth2Token_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(userAgent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, userAgent_); } if (authType_ != com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest.AuthType .AUTH_TYPE_UNSPECIFIED .getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(2, authType_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(oauth2Token_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, oauth2Token_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest)) { return super.equals(obj); } com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest other = (com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest) obj; if (!getUserAgent().equals(other.getUserAgent())) return false; if (authType_ != other.authType_) return false; if (!getOauth2Token().equals(other.getOauth2Token())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + USER_AGENT_FIELD_NUMBER; hash = (53 * hash) + getUserAgent().hashCode(); hash = (37 * hash) + AUTH_TYPE_FIELD_NUMBER; hash = (53 * hash) + authType_; hash = (37 * hash) + OAUTH2_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getOauth2Token().hashCode(); hash = 
(29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { 
return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * Message used by AlloyDB connectors to exchange client and connection metadata
   * with the server after a successful TLS handshake. This metadata includes an
   * IAM token, which is used to authenticate users based on their IAM identity.
   * The sole purpose of this message is for the use of AlloyDB connectors.
   * Clients should not rely on this message directly as there can be breaking
   * changes in the future.
   *
   * Protobuf type {@code google.cloud.alloydb.connectors.v1.MetadataExchangeRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.alloydb.connectors.v1.MetadataExchangeRequest)
      com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.alloydb.connectors.v1.ResourcesProto
          .internal_static_google_cloud_alloydb_connectors_v1_MetadataExchangeRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.alloydb.connectors.v1.ResourcesProto
          .internal_static_google_cloud_alloydb_connectors_v1_MetadataExchangeRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest.class,
              com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest.Builder.class);
    }

    // Construct using com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets every field to its default value and clears all has-bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      userAgent_ = "";
      authType_ = 0;
      oauth2Token_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.alloydb.connectors.v1.ResourcesProto
          .internal_static_google_cloud_alloydb_connectors_v1_MetadataExchangeRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest
        getDefaultInstanceForType() {
      return com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest build() {
      com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest buildPartial() {
      com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest result =
          new com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose has-bit is set into the freshly built message.
    private void buildPartial0(
        com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.userAgent_ = userAgent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.authType_ = authType_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.oauth2Token_ = oauth2Token_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder
clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest) {
        return mergeFrom((com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Field-wise merge: only the non-empty / non-default fields of `other`
    // overwrite this builder's fields; merging the default instance is a no-op.
    public Builder mergeFrom(com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest other) {
      if (other
          == com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest.getDefaultInstance())
        return this;
      if (!other.getUserAgent().isEmpty()) {
        userAgent_ = other.userAgent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.authType_ != 0) {
        setAuthTypeValue(other.getAuthTypeValue());
      }
      if (!other.getOauth2Token().isEmpty()) {
        oauth2Token_ = other.oauth2Token_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format parse loop: dispatches on each field tag until end of input
    // (tag 0) or an end-group tag; unrecognized tags are preserved as unknown
    // fields.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                userAgent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 16:
              {
                authType_ = input.readEnum();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 26:
              {
                oauth2Token_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    // Stored as Object: either a String or a ByteString; lazily converted and
    // cached by the accessors below.
    private java.lang.Object userAgent_ = "";
    /**
     *
     *
     * <pre>
     * Optional. Connector information.
     * </pre>
     *
     * <code>string user_agent = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The userAgent.
     */
    public java.lang.String getUserAgent() {
      java.lang.Object ref = userAgent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        userAgent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. Connector information.
     * </pre>
     *
     * <code>string user_agent = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for userAgent.
     */
    public com.google.protobuf.ByteString getUserAgentBytes() {
      java.lang.Object ref = userAgent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        userAgent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. Connector information.
     * </pre>
     *
     * <code>string user_agent = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The userAgent to set.
     * @return This builder for chaining.
     */
    public Builder setUserAgent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      userAgent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. Connector information.
* </pre>
     *
     * <code>string user_agent = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearUserAgent() {
      userAgent_ = getDefaultInstance().getUserAgent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. Connector information.
     * </pre>
     *
     * <code>string user_agent = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for userAgent to set.
     * @return This builder for chaining.
     */
    public Builder setUserAgentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      userAgent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    // Enum stored as its raw wire number so unknown values survive a round trip.
    private int authType_ = 0;
    /**
     *
     *
     * <pre>
     * Authentication type.
     * </pre>
     *
     * <code>.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest.AuthType auth_type = 2;
     * </code>
     *
     * @return The enum numeric value on the wire for authType.
     */
    @java.lang.Override
    public int getAuthTypeValue() {
      return authType_;
    }

    /**
     *
     *
     * <pre>
     * Authentication type.
     * </pre>
     *
     * <code>.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest.AuthType auth_type = 2;
     * </code>
     *
     * @param value The enum numeric value on the wire for authType to set.
     * @return This builder for chaining.
     */
    public Builder setAuthTypeValue(int value) {
      authType_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Authentication type.
     * </pre>
     *
     * <code>.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest.AuthType auth_type = 2;
     * </code>
     *
     * @return The authType.
     */
    @java.lang.Override
    public com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest.AuthType getAuthType() {
      com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest.AuthType result =
          com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest.AuthType.forNumber(
              authType_);
      // Wire values with no matching enum constant map to UNRECOGNIZED, not null.
      return result == null
          ? com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest.AuthType.UNRECOGNIZED
          : result;
    }

    /**
     *
     *
     * <pre>
     * Authentication type.
     * </pre>
     *
     * <code>.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest.AuthType auth_type = 2;
     * </code>
     *
     * @param value The authType to set.
     * @return This builder for chaining.
     */
    public Builder setAuthType(
        com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest.AuthType value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000002;
      authType_ = value.getNumber();
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Authentication type.
     * </pre>
     *
     * <code>.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest.AuthType auth_type = 2;
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearAuthType() {
      bitField0_ = (bitField0_ & ~0x00000002);
      authType_ = 0;
      onChanged();
      return this;
    }

    // Stored as Object: either a String or a ByteString (see userAgent_ above).
    private java.lang.Object oauth2Token_ = "";
    /**
     *
     *
     * <pre>
     * IAM token used for both IAM user authentication and
     * `alloydb.instances.connect` permission check.
     * </pre>
     *
     * <code>string oauth2_token = 3;</code>
     *
     * @return The oauth2Token.
     */
    public java.lang.String getOauth2Token() {
      java.lang.Object ref = oauth2Token_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        oauth2Token_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * IAM token used for both IAM user authentication and
     * `alloydb.instances.connect` permission check.
     * </pre>
     *
     * <code>string oauth2_token = 3;</code>
     *
     * @return The bytes for oauth2Token.
     */
    public com.google.protobuf.ByteString getOauth2TokenBytes() {
      java.lang.Object ref = oauth2Token_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        oauth2Token_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * IAM token used for both IAM user authentication and
     * `alloydb.instances.connect` permission check.
     * </pre>
     *
     * <code>string oauth2_token = 3;</code>
     *
     * @param value The oauth2Token to set.
     * @return This builder for chaining.
     */
    public Builder setOauth2Token(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      oauth2Token_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * IAM token used for both IAM user authentication and
     * `alloydb.instances.connect` permission check.
     * </pre>
     *
     * <code>string oauth2_token = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearOauth2Token() {
      oauth2Token_ = getDefaultInstance().getOauth2Token();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * IAM token used for both IAM user authentication and
     * `alloydb.instances.connect` permission check.
     * </pre>
     *
     * <code>string oauth2_token = 3;</code>
     *
     * @param value The bytes for oauth2Token to set.
     * @return This builder for chaining.
*/
    public Builder setOauth2TokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      oauth2Token_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.alloydb.connectors.v1.MetadataExchangeRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.alloydb.connectors.v1.MetadataExchangeRequest)
  private static final com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest();
  }

  public static com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser singleton: delegates to Builder.mergeFrom, re-wrapping any failure
  // as InvalidProtocolBufferException with the partially parsed message
  // attached as the "unfinished message".
  private static final com.google.protobuf.Parser<MetadataExchangeRequest> PARSER =
      new com.google.protobuf.AbstractParser<MetadataExchangeRequest>() {
        @java.lang.Override
        public MetadataExchangeRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<MetadataExchangeRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<MetadataExchangeRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.alloydb.connectors.v1.MetadataExchangeRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
openjdk/jdk8
36,951
jdk/src/share/classes/java/security/AccessController.java
/* * Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package java.security; import sun.security.util.Debug; import sun.reflect.CallerSensitive; import sun.reflect.Reflection; /** * <p> The AccessController class is used for access control operations * and decisions. * * <p> More specifically, the AccessController class is used for * three purposes: * * <ul> * <li> to decide whether an access to a critical system * resource is to be allowed or denied, based on the security policy * currently in effect, * <li>to mark code as being "privileged", thus affecting subsequent * access determinations, and * <li>to obtain a "snapshot" of the current calling context so * access-control decisions from a different context can be made with * respect to the saved context. 
</ul> * * <p> The {@link #checkPermission(Permission) checkPermission} method * determines whether the access request indicated by a specified * permission should be granted or denied. A sample call appears * below. In this example, {@code checkPermission} will determine * whether or not to grant "read" access to the file named "testFile" in * the "/temp" directory. * * <pre> * * FilePermission perm = new FilePermission("/temp/testFile", "read"); * AccessController.checkPermission(perm); * * </pre> * * <p> If a requested access is allowed, * {@code checkPermission} returns quietly. If denied, an * AccessControlException is * thrown. AccessControlException can also be thrown if the requested * permission is of an incorrect type or contains an invalid value. * Such information is given whenever possible. * * Suppose the current thread traversed m callers, in the order of caller 1 * to caller 2 to caller m. Then caller m invoked the * {@code checkPermission} method. * The {@code checkPermission} method determines whether access * is granted or denied based on the following algorithm: * * <pre> {@code * for (int i = m; i > 0; i--) { * * if (caller i's domain does not have the permission) * throw AccessControlException * * else if (caller i is marked as privileged) { * if (a context was specified in the call to doPrivileged) * context.checkPermission(permission) * if (limited permissions were specified in the call to doPrivileged) { * for (each limited permission) { * if (the limited permission implies the requested permission) * return; * } * } else * return; * } * } * * // Next, check the context inherited when the thread was created. * // Whenever a new thread is created, the AccessControlContext at * // that time is stored and associated with the new thread, as the * // "inherited" context. 
* * inheritedContext.checkPermission(permission); * }</pre> * * <p> A caller can be marked as being "privileged" * (see {@link #doPrivileged(PrivilegedAction) doPrivileged} and below). * When making access control decisions, the {@code checkPermission} * method stops checking if it reaches a caller that * was marked as "privileged" via a {@code doPrivileged} * call without a context argument (see below for information about a * context argument). If that caller's domain has the * specified permission and at least one limiting permission argument (if any) * implies the requested permission, no further checking is done and * {@code checkPermission} * returns quietly, indicating that the requested access is allowed. * If that domain does not have the specified permission, an exception * is thrown, as usual. If the caller's domain had the specified permission * but it was not implied by any limiting permission arguments given in the call * to {@code doPrivileged} then the permission checking continues * until there are no more callers or another {@code doPrivileged} * call matches the requested permission and returns normally. * * <p> The normal use of the "privileged" feature is as follows. If you * don't need to return a value from within the "privileged" block, do * the following: * * <pre> {@code * somemethod() { * ...normal code here... * AccessController.doPrivileged(new PrivilegedAction<Void>() { * public Void run() { * // privileged code goes here, for example: * System.loadLibrary("awt"); * return null; // nothing to return * } * }); * ...normal code here... * }}</pre> * * <p> * PrivilegedAction is an interface with a single method, named * {@code run}. * The above example shows creation of an implementation * of that interface; a concrete implementation of the * {@code run} method is supplied. * When the call to {@code doPrivileged} is made, an * instance of the PrivilegedAction implementation is passed * to it. 
The {@code doPrivileged} method calls the * {@code run} method from the PrivilegedAction * implementation after enabling privileges, and returns the * {@code run} method's return value as the * {@code doPrivileged} return value (which is * ignored in this example). * * <p> If you need to return a value, you can do something like the following: * * <pre> {@code * somemethod() { * ...normal code here... * String user = AccessController.doPrivileged( * new PrivilegedAction<String>() { * public String run() { * return System.getProperty("user.name"); * } * }); * ...normal code here... * }}</pre> * * <p>If the action performed in your {@code run} method could * throw a "checked" exception (those listed in the {@code throws} clause * of a method), then you need to use the * {@code PrivilegedExceptionAction} interface instead of the * {@code PrivilegedAction} interface: * * <pre> {@code * somemethod() throws FileNotFoundException { * ...normal code here... * try { * FileInputStream fis = AccessController.doPrivileged( * new PrivilegedExceptionAction<FileInputStream>() { * public FileInputStream run() throws FileNotFoundException { * return new FileInputStream("someFile"); * } * }); * } catch (PrivilegedActionException e) { * // e.getException() should be an instance of FileNotFoundException, * // as only "checked" exceptions will be "wrapped" in a * // PrivilegedActionException. * throw (FileNotFoundException) e.getException(); * } * ...normal code here... * }}</pre> * * <p> Be *very* careful in your use of the "privileged" construct, and * always remember to make the privileged code section as small as possible. * You can pass {@code Permission} arguments to further limit the * scope of the "privilege" (see below). * * * <p> Note that {@code checkPermission} always performs security checks * within the context of the currently executing thread. 
* Sometimes a security check that should be made within a given context
 * will actually need to be done from within a
 * <i>different</i> context (for example, from within a worker thread).
 * The {@link #getContext() getContext} method and
 * AccessControlContext class are provided
 * for this situation. The {@code getContext} method takes a "snapshot"
 * of the current calling context, and places
 * it in an AccessControlContext object, which it returns. A sample call is
 * the following:
 *
 * <pre>
 *
 * AccessControlContext acc = AccessController.getContext();
 *
 * </pre>
 *
 * <p>
 * AccessControlContext itself has a {@code checkPermission} method
 * that makes access decisions based on the context <i>it</i> encapsulates,
 * rather than that of the current execution thread.
 * Code within a different context can thus call that method on the
 * previously-saved AccessControlContext object. A sample call is the
 * following:
 *
 * <pre>
 *
 * acc.checkPermission(permission);
 *
 * </pre>
 *
 * <p> There are also times where you don't know a priori which permissions
 * to check the context against. In these cases you can use the
 * doPrivileged method that takes a context. You can also limit the scope
 * of the privileged code by passing additional {@code Permission}
 * parameters.
 *
 * <pre> {@code
 * somemethod() {
 *     AccessController.doPrivileged(new PrivilegedAction<Object>() {
 *         public Object run() {
 *             // Code goes here. Any permission checks within this
 *             // run method will require that the intersection of the
 *             // caller's protection domain and the snapshot's
 *             // context have the desired permission. If a requested
 *             // permission is not implied by the limiting FilePermission
 *             // argument then checking of the thread continues beyond the
 *             // caller of doPrivileged.
 *         }
 *     }, acc, new FilePermission("/temp/*", "read"));
 *     ...normal code here...
* }}</pre>
 * <p> Passing a limiting {@code Permission} argument of an instance of
 * {@code AllPermission} is equivalent to calling the equivalent
 * {@code doPrivileged} method without limiting {@code Permission}
 * arguments. Passing a zero length array of {@code Permission} disables
 * the code privileges so that checking always continues beyond the caller of
 * that {@code doPrivileged} method.
 *
 * @see AccessControlContext
 *
 * @author Li Gong
 * @author Roland Schemers
 */

public final class AccessController {

    /**
     * Don't allow anyone to instantiate an AccessController
     */
    private AccessController() { }

    /**
     * Performs the specified {@code PrivilegedAction} with privileges
     * enabled. The action is performed with <i>all</i> of the permissions
     * possessed by the caller's protection domain.
     *
     * <p> If the action's {@code run} method throws an (unchecked)
     * exception, it will propagate through this method.
     *
     * <p> Note that any DomainCombiner associated with the current
     * AccessControlContext will be ignored while the action is performed.
     *
     * @param <T> the type of the value returned by the PrivilegedAction's
     *                  {@code run} method.
     *
     * @param action the action to be performed.
     *
     * @return the value returned by the action's {@code run} method.
     *
     * @exception NullPointerException if the action is {@code null}
     *
     * @see #doPrivileged(PrivilegedAction,AccessControlContext)
     * @see #doPrivileged(PrivilegedExceptionAction)
     * @see #doPrivilegedWithCombiner(PrivilegedAction)
     * @see java.security.DomainCombiner
     */
    // NOTE(review): @CallerSensitive — the implementation's behavior depends on
    // the identity of the immediate caller, so no wrapper frames may be added.
    @CallerSensitive
    public static native <T> T doPrivileged(PrivilegedAction<T> action);

    /**
     * Performs the specified {@code PrivilegedAction} with privileges
     * enabled. The action is performed with <i>all</i> of the permissions
     * possessed by the caller's protection domain.
     *
     * <p> If the action's {@code run} method throws an (unchecked)
     * exception, it will propagate through this method.
     *
     * <p> This method preserves the current AccessControlContext's
     * DomainCombiner (which may be null) while the action is performed.
     *
     * @param <T> the type of the value returned by the PrivilegedAction's
     *                  {@code run} method.
     *
     * @param action the action to be performed.
     *
     * @return the value returned by the action's {@code run} method.
     *
     * @exception NullPointerException if the action is {@code null}
     *
     * @see #doPrivileged(PrivilegedAction)
     * @see java.security.DomainCombiner
     *
     * @since 1.6
     */
    @CallerSensitive
    public static <T> T doPrivilegedWithCombiner(PrivilegedAction<T> action) {
        // No stack context means there is no combiner to preserve: fall back to
        // the plain doPrivileged.
        AccessControlContext acc = getStackAccessControlContext();
        if (acc == null) {
            return AccessController.doPrivileged(action);
        }
        DomainCombiner dc = acc.getAssignedCombiner();
        // NOTE(review): getStackAccessControlContext/preserveCombiner are
        // declared elsewhere in this class (not visible in this chunk).
        return AccessController.doPrivileged(action,
                                             preserveCombiner(dc, Reflection.getCallerClass()));
    }

    /**
     * Performs the specified {@code PrivilegedAction} with privileges
     * enabled and restricted by the specified {@code AccessControlContext}.
     * The action is performed with the intersection of the permissions
     * possessed by the caller's protection domain, and those possessed
     * by the domains represented by the specified {@code AccessControlContext}.
     * <p>
     * If the action's {@code run} method throws an (unchecked) exception,
     * it will propagate through this method.
     * <p>
     * If a security manager is installed and the specified
     * {@code AccessControlContext} was not created by system code and the
     * caller's {@code ProtectionDomain} has not been granted the
     * {@literal "createAccessControlContext"}
     * {@link java.security.SecurityPermission}, then the action is performed
     * with no permissions.
     *
     * @param <T> the type of the value returned by the PrivilegedAction's
     *                  {@code run} method.
     * @param action the action to be performed.
     * @param context an <i>access control context</i>
     *                representing the restriction to be applied to the
     *                caller's domain's privileges before performing
     *                the specified action.
If the context is
     *                {@code null}, then no additional restriction is applied.
     *
     * @return the value returned by the action's {@code run} method.
     *
     * @exception NullPointerException if the action is {@code null}
     *
     * @see #doPrivileged(PrivilegedAction)
     * @see #doPrivileged(PrivilegedExceptionAction,AccessControlContext)
     */
    @CallerSensitive
    public static native <T> T doPrivileged(PrivilegedAction<T> action,
                                            AccessControlContext context);

    /**
     * Performs the specified {@code PrivilegedAction} with privileges
     * enabled and restricted by the specified
     * {@code AccessControlContext} and with a privilege scope limited
     * by specified {@code Permission} arguments.
     *
     * The action is performed with the intersection of the permissions
     * possessed by the caller's protection domain, and those possessed
     * by the domains represented by the specified
     * {@code AccessControlContext}.
     * <p>
     * If the action's {@code run} method throws an (unchecked) exception,
     * it will propagate through this method.
     * <p>
     * If a security manager is installed and the specified
     * {@code AccessControlContext} was not created by system code and the
     * caller's {@code ProtectionDomain} has not been granted the
     * {@literal "createAccessControlContext"}
     * {@link java.security.SecurityPermission}, then the action is performed
     * with no permissions.
     *
     * @param <T> the type of the value returned by the PrivilegedAction's
     *                  {@code run} method.
     * @param action the action to be performed.
     * @param context an <i>access control context</i>
     *                representing the restriction to be applied to the
     *                caller's domain's privileges before performing
     *                the specified action. If the context is
     *                {@code null},
     *                then no additional restriction is applied.
     * @param perms the {@code Permission} arguments which limit the
     *              scope of the caller's privileges. The number of arguments
     *              is variable.
     *
     * @return the value returned by the action's {@code run} method.
     *
     * @throws NullPointerException if action or perms or any element of
     *         perms is {@code null}
     *
     * @see #doPrivileged(PrivilegedAction)
     * @see #doPrivileged(PrivilegedExceptionAction,AccessControlContext)
     *
     * @since 1.8
     */
    @CallerSensitive
    public static <T> T doPrivileged(PrivilegedAction<T> action,
        AccessControlContext context, Permission... perms) {

        // Snapshot the caller's full context before applying the limits.
        AccessControlContext parent = getContext();
        if (perms == null) {
            throw new NullPointerException("null permissions parameter");
        }
        Class <?> caller = Reflection.getCallerClass();
        // NOTE(review): createWrapper is declared elsewhere in this class; it
        // presumably combines caller, parent, context and perms into the
        // restricted context — confirm against the full file.
        return AccessController.doPrivileged(action, createWrapper(null,
            caller, parent, context, perms));
    }

    /**
     * Performs the specified {@code PrivilegedAction} with privileges
     * enabled and restricted by the specified
     * {@code AccessControlContext} and with a privilege scope limited
     * by specified {@code Permission} arguments.
     *
     * The action is performed with the intersection of the permissions
     * possessed by the caller's protection domain, and those possessed
     * by the domains represented by the specified
     * {@code AccessControlContext}.
     * <p>
     * If the action's {@code run} method throws an (unchecked) exception,
     * it will propagate through this method.
     *
     * <p> This method preserves the current AccessControlContext's
     * DomainCombiner (which may be null) while the action is performed.
     * <p>
     * If a security manager is installed and the specified
     * {@code AccessControlContext} was not created by system code and the
     * caller's {@code ProtectionDomain} has not been granted the
     * {@literal "createAccessControlContext"}
     * {@link java.security.SecurityPermission}, then the action is performed
     * with no permissions.
     *
     * @param <T> the type of the value returned by the PrivilegedAction's
     *                  {@code run} method.
     * @param action the action to be performed.
     * @param context an <i>access control context</i>
     *                representing the restriction to be applied to the
     *                caller's domain's privileges before performing
     *                the specified action.
If the context is * {@code null}, * then no additional restriction is applied. * @param perms the {@code Permission} arguments which limit the * scope of the caller's privileges. The number of arguments * is variable. * * @return the value returned by the action's {@code run} method. * * @throws NullPointerException if action or perms or any element of * perms is {@code null} * * @see #doPrivileged(PrivilegedAction) * @see #doPrivileged(PrivilegedExceptionAction,AccessControlContext) * @see java.security.DomainCombiner * * @since 1.8 */ @CallerSensitive public static <T> T doPrivilegedWithCombiner(PrivilegedAction<T> action, AccessControlContext context, Permission... perms) { AccessControlContext parent = getContext(); DomainCombiner dc = parent.getCombiner(); if (dc == null && context != null) { dc = context.getCombiner(); } if (perms == null) { throw new NullPointerException("null permissions parameter"); } Class <?> caller = Reflection.getCallerClass(); return AccessController.doPrivileged(action, createWrapper(dc, caller, parent, context, perms)); } /** * Performs the specified {@code PrivilegedExceptionAction} with * privileges enabled. The action is performed with <i>all</i> of the * permissions possessed by the caller's protection domain. * * <p> If the action's {@code run} method throws an <i>unchecked</i> * exception, it will propagate through this method. * * <p> Note that any DomainCombiner associated with the current * AccessControlContext will be ignored while the action is performed. * * @param <T> the type of the value returned by the * PrivilegedExceptionAction's {@code run} method. 
* * @param action the action to be performed * * @return the value returned by the action's {@code run} method * * @exception PrivilegedActionException if the specified action's * {@code run} method threw a <i>checked</i> exception * @exception NullPointerException if the action is {@code null} * * @see #doPrivileged(PrivilegedAction) * @see #doPrivileged(PrivilegedExceptionAction,AccessControlContext) * @see #doPrivilegedWithCombiner(PrivilegedExceptionAction) * @see java.security.DomainCombiner */ @CallerSensitive public static native <T> T doPrivileged(PrivilegedExceptionAction<T> action) throws PrivilegedActionException; /** * Performs the specified {@code PrivilegedExceptionAction} with * privileges enabled. The action is performed with <i>all</i> of the * permissions possessed by the caller's protection domain. * * <p> If the action's {@code run} method throws an <i>unchecked</i> * exception, it will propagate through this method. * * <p> This method preserves the current AccessControlContext's * DomainCombiner (which may be null) while the action is performed. * * @param <T> the type of the value returned by the * PrivilegedExceptionAction's {@code run} method. * * @param action the action to be performed. 
* * @return the value returned by the action's {@code run} method * * @exception PrivilegedActionException if the specified action's * {@code run} method threw a <i>checked</i> exception * @exception NullPointerException if the action is {@code null} * * @see #doPrivileged(PrivilegedAction) * @see #doPrivileged(PrivilegedExceptionAction,AccessControlContext) * @see java.security.DomainCombiner * * @since 1.6 */ @CallerSensitive public static <T> T doPrivilegedWithCombiner(PrivilegedExceptionAction<T> action) throws PrivilegedActionException { AccessControlContext acc = getStackAccessControlContext(); if (acc == null) { return AccessController.doPrivileged(action); } DomainCombiner dc = acc.getAssignedCombiner(); return AccessController.doPrivileged(action, preserveCombiner(dc, Reflection.getCallerClass())); } /** * preserve the combiner across the doPrivileged call */ private static AccessControlContext preserveCombiner(DomainCombiner combiner, Class<?> caller) { return createWrapper(combiner, caller, null, null, null); } /** * Create a wrapper to contain the limited privilege scope data. 
*/ private static AccessControlContext createWrapper(DomainCombiner combiner, Class<?> caller, AccessControlContext parent, AccessControlContext context, Permission[] perms) { ProtectionDomain callerPD = getCallerPD(caller); // check if caller is authorized to create context if (context != null && !context.isAuthorized() && System.getSecurityManager() != null && !callerPD.impliesCreateAccessControlContext()) { ProtectionDomain nullPD = new ProtectionDomain(null, null); return new AccessControlContext(new ProtectionDomain[] { nullPD }); } else { return new AccessControlContext(callerPD, combiner, parent, context, perms); } } private static ProtectionDomain getCallerPD(final Class <?> caller) { ProtectionDomain callerPd = doPrivileged (new PrivilegedAction<ProtectionDomain>() { public ProtectionDomain run() { return caller.getProtectionDomain(); } }); return callerPd; } /** * Performs the specified {@code PrivilegedExceptionAction} with * privileges enabled and restricted by the specified * {@code AccessControlContext}. The action is performed with the * intersection of the permissions possessed by the caller's * protection domain, and those possessed by the domains represented by the * specified {@code AccessControlContext}. * <p> * If the action's {@code run} method throws an <i>unchecked</i> * exception, it will propagate through this method. * <p> * If a security manager is installed and the specified * {@code AccessControlContext} was not created by system code and the * caller's {@code ProtectionDomain} has not been granted the * {@literal "createAccessControlContext"} * {@link java.security.SecurityPermission}, then the action is performed * with no permissions. * * @param <T> the type of the value returned by the * PrivilegedExceptionAction's {@code run} method. 
     * @param action the action to be performed
     * @param context an <i>access control context</i> representing the
     *        restriction to be applied to the caller's domain's
     *        privileges before performing the specified action. If the
     *        context is {@code null}, then no additional restriction is
     *        applied.
     * @return the value returned by the action's {@code run} method
     * @exception PrivilegedActionException if the specified action's
     *            {@code run} method threw a <i>checked</i> exception
     * @exception NullPointerException if the action is {@code null}
     * @see #doPrivileged(PrivilegedAction)
     * @see #doPrivileged(PrivilegedAction,AccessControlContext)
     */
    @CallerSensitive
    public static native <T> T
        doPrivileged(PrivilegedExceptionAction<T> action,
                     AccessControlContext context)
        throws PrivilegedActionException;

    /**
     * Performs {@code action} with privileges enabled, restricted by the
     * given {@code context} and with a privilege scope limited to the
     * given {@code perms}. The action runs with the intersection of the
     * permissions possessed by the caller's protection domain and those
     * possessed by the domains represented by {@code context}. Unchecked
     * exceptions propagate; checked exceptions are reported via
     * {@link PrivilegedActionException}. If a security manager is
     * installed, {@code context} was not created by system code, and the
     * caller's {@code ProtectionDomain} has not been granted the
     * {@literal "createAccessControlContext"}
     * {@link java.security.SecurityPermission}, the action is performed
     * with no permissions.
     *
     * @param <T> the type of the value returned by the
     *            PrivilegedExceptionAction's {@code run} method.
     * @param action the action to be performed.
     * @param context the restricting context; {@code null} means no
     *        additional restriction is applied.
     * @param perms the {@code Permission} arguments which limit the scope
     *        of the caller's privileges. The number of arguments is
     *        variable.
     * @return the value returned by the action's {@code run} method.
     * @throws PrivilegedActionException if the specified action's
     *         {@code run} method threw a <i>checked</i> exception
     * @throws NullPointerException if action or perms or any element of
     *         perms is {@code null}
     * @see #doPrivileged(PrivilegedAction)
     * @see #doPrivileged(PrivilegedAction,AccessControlContext)
     * @since 1.8
     */
    @CallerSensitive
    public static <T> T doPrivileged(PrivilegedExceptionAction<T> action,
                                     AccessControlContext context, Permission... perms)
        throws PrivilegedActionException
    {
        // Snapshot the caller's full context before building the wrapper.
        AccessControlContext parent = getContext();
        if (perms == null) {
            throw new NullPointerException("null permissions parameter");
        }
        Class <?> caller = Reflection.getCallerClass();
        // null combiner: the current context's DomainCombiner is ignored
        // by this overload.
        return AccessController.doPrivileged(action,
            createWrapper(null, caller, parent, context, perms));
    }

    /**
     * Same as
     * {@link #doPrivileged(PrivilegedExceptionAction,AccessControlContext,Permission...)}
     * but preserves the current AccessControlContext's DomainCombiner
     * (which may be null) while the action is performed; when the current
     * context has no combiner, the combiner of {@code context} (if any)
     * is used instead.
     *
     * @param <T> the type of the value returned by the
     *            PrivilegedExceptionAction's {@code run} method.
     * @param action the action to be performed.
     * @param context the restricting context; {@code null} means no
     *        additional restriction is applied.
     * @param perms the {@code Permission} arguments which limit the scope
     *        of the caller's privileges. The number of arguments is
     *        variable.
     * @return the value returned by the action's {@code run} method.
     * @throws PrivilegedActionException if the specified action's
     *         {@code run} method threw a <i>checked</i> exception
     * @throws NullPointerException if action or perms or any element of
     *         perms is {@code null}
     * @see #doPrivileged(PrivilegedAction)
     * @see #doPrivileged(PrivilegedAction,AccessControlContext)
     * @see java.security.DomainCombiner
     * @since 1.8
     */
    @CallerSensitive
    public static <T> T doPrivilegedWithCombiner(PrivilegedExceptionAction<T> action,
                                                 AccessControlContext context,
                                                 Permission... perms)
        throws PrivilegedActionException
    {
        AccessControlContext parent = getContext();
        DomainCombiner dc = parent.getCombiner();
        // Fall back to the supplied context's combiner when the caller's
        // own context has none.
        if (dc == null && context != null) {
            dc = context.getCombiner();
        }
        if (perms == null) {
            throw new NullPointerException("null permissions parameter");
        }
        Class <?> caller = Reflection.getCallerClass();
        return AccessController.doPrivileged(action,
            createWrapper(dc, caller, parent, context, perms));
    }

    /**
     * Returns the AccessControl context. i.e., it gets the protection
     * domains of all the callers on the stack, starting at the first
     * class with a non-null ProtectionDomain.
     *
     * @return the access control context based on the current stack or
     *         null if there was only privileged system code.
     */
    private static native AccessControlContext getStackAccessControlContext();

    /**
     * Returns the "inherited" AccessControl context. This is the context
     * that existed when the thread was created. Package private so
     * AccessControlContext can use it.
     */
    static native AccessControlContext getInheritedAccessControlContext();

    /**
     * This method takes a "snapshot" of the current calling context,
     * which includes the current Thread's inherited AccessControlContext
     * and any limited privilege scope, and places it in an
     * AccessControlContext object. This context may then be checked at a
     * later point, possibly in another thread.
     *
     * @see AccessControlContext
     * @return the AccessControlContext based on the current context.
     */
    public static AccessControlContext getContext() {
        AccessControlContext acc = getStackAccessControlContext();
        if (acc == null) {
            // all we had was privileged system code. We don't want
            // to return null though, so we construct a real ACC.
            return new AccessControlContext(null, true);
        } else {
            return acc.optimize();
        }
    }

    /**
     * Determines whether the access request indicated by the specified
     * permission should be allowed or denied, based on the current
     * AccessControlContext and security policy. This method quietly
     * returns if the access request is permitted, or throws an
     * AccessControlException otherwise. The getPermission method of the
     * AccessControlException returns the {@code perm} Permission object
     * instance.
     *
     * @param perm the requested permission.
     * @exception AccessControlException if the specified permission
     *            is not permitted, based on the current security policy.
     * @exception NullPointerException if the specified permission
     *            is {@code null} and is checked based on the
     *            security policy currently in effect.
     */
    public static void checkPermission(Permission perm)
        throws AccessControlException
    {
        //System.err.println("checkPermission "+perm);
        //Thread.currentThread().dumpStack();

        if (perm == null) {
            throw new NullPointerException("permission can't be null");
        }

        AccessControlContext stack = getStackAccessControlContext();
        // if context is null, we had privileged system code on the stack.
        if (stack == null) {
            Debug debug = AccessControlContext.getDebug();
            boolean dumpDebug = false;
            if (debug != null) {
                // Suppress debug output when a codebase= filter is active,
                // or a permission= filter does not match this permission.
                dumpDebug = !Debug.isOn("codebase=");
                dumpDebug &= !Debug.isOn("permission=") ||
                    Debug.isOn("permission=" + perm.getClass().getCanonicalName());
            }

            if (dumpDebug && Debug.isOn("stack")) {
                Thread.dumpStack();
            }

            if (dumpDebug && Debug.isOn("domain")) {
                debug.println("domain (context is null)");
            }

            if (dumpDebug) {
                debug.println("access allowed "+perm);
            }
            // Privileged system code only: access is implicitly allowed.
            return;
        }

        AccessControlContext acc = stack.optimize();
        acc.checkPermission(perm);
    }
}
apache/sentry
36,680
sentry-service/sentry-service-api/src/gen/thrift/gen-javabean/org/apache/sentry/api/service/thrift/TListSentryPrivilegesByAuthRequest.java
/**
 * Autogenerated by Thrift Compiler (0.9.3)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 * @generated
 */
package org.apache.sentry.api.service.thrift;

import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;

import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.protocol.TProtocolException;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TException;
import org.apache.thrift.async.AsyncMethodCallback;
import org.apache.thrift.server.AbstractNonblockingServer.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import javax.annotation.Generated;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Thrift request struct for listing Sentry privileges by authorizable.
 * Required fields: protocol_version (thrift id 1), requestorUserName (2),
 * authorizableSet (3); optional fields: groups (4), roleSet (5), users (6).
 */
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
public class TListSentryPrivilegesByAuthRequest implements org.apache.thrift.TBase<TListSentryPrivilegesByAuthRequest, TListSentryPrivilegesByAuthRequest._Fields>, java.io.Serializable, Cloneable, Comparable<TListSentryPrivilegesByAuthRequest> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TListSentryPrivilegesByAuthRequest");

  // Wire-level field descriptors: field name, thrift type, thrift id.
  private static final org.apache.thrift.protocol.TField PROTOCOL_VERSION_FIELD_DESC = new org.apache.thrift.protocol.TField("protocol_version", org.apache.thrift.protocol.TType.I32, (short)1);
  private static final org.apache.thrift.protocol.TField REQUESTOR_USER_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("requestorUserName", org.apache.thrift.protocol.TType.STRING, (short)2);
  private static final org.apache.thrift.protocol.TField AUTHORIZABLE_SET_FIELD_DESC = new org.apache.thrift.protocol.TField("authorizableSet", org.apache.thrift.protocol.TType.SET, (short)3);
  private static final org.apache.thrift.protocol.TField GROUPS_FIELD_DESC = new org.apache.thrift.protocol.TField("groups", org.apache.thrift.protocol.TType.SET, (short)4);
  private static final org.apache.thrift.protocol.TField ROLE_SET_FIELD_DESC = new org.apache.thrift.protocol.TField("roleSet", org.apache.thrift.protocol.TType.STRUCT, (short)5);
  private static final org.apache.thrift.protocol.TField USERS_FIELD_DESC = new org.apache.thrift.protocol.TField("users", org.apache.thrift.protocol.TType.SET, (short)6);

  // Pluggable serialization schemes (standard field-by-field vs tuple).
  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new TListSentryPrivilegesByAuthRequestStandardSchemeFactory());
    schemes.put(TupleScheme.class, new TListSentryPrivilegesByAuthRequestTupleSchemeFactory());
  }

  private int protocol_version; // required
  private String requestorUserName; // required
  private Set<TSentryAuthorizable> authorizableSet; // required
  private Set<String> groups; // optional
  private TSentryActiveRoleSet roleSet; // optional
  private Set<String> users; // optional

  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    PROTOCOL_VERSION((short)1, "protocol_version"),
    REQUESTOR_USER_NAME((short)2, "requestorUserName"),
    AUTHORIZABLE_SET((short)3, "authorizableSet"),
    GROUPS((short)4, "groups"),
    ROLE_SET((short)5, "roleSet"),
    USERS((short)6, "users");

    // Lookup table from thrift field name to constant.
    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();

    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // PROTOCOL_VERSION
          return PROTOCOL_VERSION;
        case 2: // REQUESTOR_USER_NAME
          return REQUESTOR_USER_NAME;
        case 3: // AUTHORIZABLE_SET
          return AUTHORIZABLE_SET;
        case 4: // GROUPS
          return GROUPS;
        case 5: // ROLE_SET
          return ROLE_SET;
        case 6: // USERS
          return USERS;
        default:
          return null;
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }

    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }

    private final short _thriftId;
    private final String _fieldName;

    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }

    public short getThriftFieldId() {
      return _thriftId;
    }

    public String getFieldName() {
      return _fieldName;
    }
  }

  // isset id assignments
  private static final int __PROTOCOL_VERSION_ISSET_ID = 0;
  // Bitfield tracking which primitive fields have been explicitly set.
  private byte __isset_bitfield = 0;
  private static final _Fields optionals[] = {_Fields.GROUPS,_Fields.ROLE_SET,_Fields.USERS};
  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    // Build the immutable field metadata map consumed by thrift runtime
    // introspection, then register it for this struct class.
    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.PROTOCOL_VERSION, new org.apache.thrift.meta_data.FieldMetaData("protocol_version", org.apache.thrift.TFieldRequirementType.REQUIRED,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
    tmpMap.put(_Fields.REQUESTOR_USER_NAME, new org.apache.thrift.meta_data.FieldMetaData("requestorUserName", org.apache.thrift.TFieldRequirementType.REQUIRED,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    tmpMap.put(_Fields.AUTHORIZABLE_SET, new org.apache.thrift.meta_data.FieldMetaData("authorizableSet", org.apache.thrift.TFieldRequirementType.REQUIRED,
        new org.apache.thrift.meta_data.SetMetaData(org.apache.thrift.protocol.TType.SET,
            new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, TSentryAuthorizable.class))));
    tmpMap.put(_Fields.GROUPS, new org.apache.thrift.meta_data.FieldMetaData("groups", org.apache.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.thrift.meta_data.SetMetaData(org.apache.thrift.protocol.TType.SET,
            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
    tmpMap.put(_Fields.ROLE_SET, new org.apache.thrift.meta_data.FieldMetaData("roleSet", org.apache.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, TSentryActiveRoleSet.class)));
    tmpMap.put(_Fields.USERS, new org.apache.thrift.meta_data.FieldMetaData("users", org.apache.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.thrift.meta_data.SetMetaData(org.apache.thrift.protocol.TType.SET,
            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(TListSentryPrivilegesByAuthRequest.class, metaDataMap);
  }

  /**
   * No-arg constructor. Defaults protocol_version to 2; note the isset
   * bit for protocol_version is NOT marked here.
   */
  public TListSentryPrivilegesByAuthRequest() {
    this.protocol_version = 2;
  }

  /** Constructor taking all required fields; marks protocol_version set. */
  public TListSentryPrivilegesByAuthRequest(
    int protocol_version,
    String requestorUserName,
    Set<TSentryAuthorizable> authorizableSet)
  {
    this();
    this.protocol_version = protocol_version;
    setProtocol_versionIsSet(true);
    this.requestorUserName = requestorUserName;
    this.authorizableSet = authorizableSet;
  }

  /**
   * Performs a deep copy on <i>other</i>.
   */
  public TListSentryPrivilegesByAuthRequest(TListSentryPrivilegesByAuthRequest other) {
    __isset_bitfield = other.__isset_bitfield;
    this.protocol_version = other.protocol_version;
    if (other.isSetRequestorUserName()) {
      this.requestorUserName = other.requestorUserName;
    }
    if (other.isSetAuthorizableSet()) {
      // Deep copy: each TSentryAuthorizable element is cloned via its
      // own copy constructor.
      Set<TSentryAuthorizable> __this__authorizableSet = new HashSet<TSentryAuthorizable>(other.authorizableSet.size());
      for (TSentryAuthorizable other_element : other.authorizableSet) {
        __this__authorizableSet.add(new TSentryAuthorizable(other_element));
      }
      this.authorizableSet = __this__authorizableSet;
    }
    if (other.isSetGroups()) {
      // Strings are immutable, so a shallow set copy suffices here.
      Set<String> __this__groups = new HashSet<String>(other.groups);
      this.groups = __this__groups;
    }
    if (other.isSetRoleSet()) {
      this.roleSet = new TSentryActiveRoleSet(other.roleSet);
    }
    if (other.isSetUsers()) {
      Set<String> __this__users = new HashSet<String>(other.users);
      this.users = __this__users;
    }
  }

  public TListSentryPrivilegesByAuthRequest deepCopy() {
    return new TListSentryPrivilegesByAuthRequest(this);
  }

  @Override
  public void clear() {
    // Reset to post-default-construction state. NOTE(review): the
    // protocol_version isset bit is not cleared here — confirm intended.
    this.protocol_version = 2;
    this.requestorUserName = null;
    this.authorizableSet = null;
    this.groups = null;
    this.roleSet = null;
    this.users = null;
  }

  public int getProtocol_version() {
    return this.protocol_version;
  }

  public void setProtocol_version(int protocol_version) {
    this.protocol_version = protocol_version;
    setProtocol_versionIsSet(true);
  }

  public void unsetProtocol_version() {
    __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __PROTOCOL_VERSION_ISSET_ID);
  }

  /** Returns true if field protocol_version is set (has been assigned a value) and false otherwise */
  public boolean isSetProtocol_version() {
    return EncodingUtils.testBit(__isset_bitfield, __PROTOCOL_VERSION_ISSET_ID);
  }

  public void setProtocol_versionIsSet(boolean value) {
    __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __PROTOCOL_VERSION_ISSET_ID, value);
  }

  public String getRequestorUserName() {
    return this.requestorUserName;
  }

  public void setRequestorUserName(String requestorUserName) {
    this.requestorUserName = requestorUserName;
  }

  public void unsetRequestorUserName() {
    this.requestorUserName = null;
  }

  /** Returns true if field requestorUserName is set (has been assigned a value) and false otherwise */
  public boolean isSetRequestorUserName() {
    return this.requestorUserName != null;
  }

  public void setRequestorUserNameIsSet(boolean value) {
    if (!value) {
      this.requestorUserName = null;
    }
  }

  public int getAuthorizableSetSize() {
    return (this.authorizableSet == null) ? 0 : this.authorizableSet.size();
  }

  public java.util.Iterator<TSentryAuthorizable> getAuthorizableSetIterator() {
    return (this.authorizableSet == null) ? null : this.authorizableSet.iterator();
  }

  public void addToAuthorizableSet(TSentryAuthorizable elem) {
    if (this.authorizableSet == null) {
      this.authorizableSet = new HashSet<TSentryAuthorizable>();
    }
    this.authorizableSet.add(elem);
  }

  public Set<TSentryAuthorizable> getAuthorizableSet() {
    return this.authorizableSet;
  }

  public void setAuthorizableSet(Set<TSentryAuthorizable> authorizableSet) {
    this.authorizableSet = authorizableSet;
  }

  public void unsetAuthorizableSet() {
    this.authorizableSet = null;
  }

  /** Returns true if field authorizableSet is set (has been assigned a value) and false otherwise */
  public boolean isSetAuthorizableSet() {
    return this.authorizableSet != null;
  }

  public void setAuthorizableSetIsSet(boolean value) {
    if (!value) {
      this.authorizableSet = null;
    }
  }

  public int getGroupsSize() {
    return (this.groups == null) ? 0 : this.groups.size();
  }

  public java.util.Iterator<String> getGroupsIterator() {
    return (this.groups == null) ? null : this.groups.iterator();
  }

  public void addToGroups(String elem) {
    if (this.groups == null) {
      this.groups = new HashSet<String>();
    }
    this.groups.add(elem);
  }

  public Set<String> getGroups() {
    return this.groups;
  }

  public void setGroups(Set<String> groups) {
    this.groups = groups;
  }

  public void unsetGroups() {
    this.groups = null;
  }

  /** Returns true if field groups is set (has been assigned a value) and false otherwise */
  public boolean isSetGroups() {
    return this.groups != null;
  }

  public void setGroupsIsSet(boolean value) {
    if (!value) {
      this.groups = null;
    }
  }

  public TSentryActiveRoleSet getRoleSet() {
    return this.roleSet;
  }

  public void setRoleSet(TSentryActiveRoleSet roleSet) {
    this.roleSet = roleSet;
  }

  public void unsetRoleSet() {
    this.roleSet = null;
  }

  /** Returns true if field roleSet is set (has been assigned a value) and false otherwise */
  public boolean isSetRoleSet() {
    return this.roleSet != null;
  }

  public void setRoleSetIsSet(boolean value) {
    if (!value) {
      this.roleSet = null;
    }
  }

  public int getUsersSize() {
    return (this.users == null) ? 0 : this.users.size();
  }

  public java.util.Iterator<String> getUsersIterator() {
    return (this.users == null) ? null : this.users.iterator();
  }

  public void addToUsers(String elem) {
    if (this.users == null) {
      this.users = new HashSet<String>();
    }
    this.users.add(elem);
  }

  public Set<String> getUsers() {
    return this.users;
  }

  public void setUsers(Set<String> users) {
    this.users = users;
  }

  public void unsetUsers() {
    this.users = null;
  }

  /** Returns true if field users is set (has been assigned a value) and false otherwise */
  public boolean isSetUsers() {
    return this.users != null;
  }

  public void setUsersIsSet(boolean value) {
    if (!value) {
      this.users = null;
    }
  }

  /** Reflective setter used by thrift runtime; null value unsets the field. */
  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
    case PROTOCOL_VERSION:
      if (value == null) {
        unsetProtocol_version();
      } else {
        setProtocol_version((Integer)value);
      }
      break;

    case REQUESTOR_USER_NAME:
      if (value == null) {
        unsetRequestorUserName();
      } else {
        setRequestorUserName((String)value);
      }
      break;

    case AUTHORIZABLE_SET:
      if (value == null) {
        unsetAuthorizableSet();
      } else {
        setAuthorizableSet((Set<TSentryAuthorizable>)value);
      }
      break;

    case GROUPS:
      if (value == null) {
        unsetGroups();
      } else {
        setGroups((Set<String>)value);
      }
      break;

    case ROLE_SET:
      if (value == null) {
        unsetRoleSet();
      } else {
        setRoleSet((TSentryActiveRoleSet)value);
      }
      break;

    case USERS:
      if (value == null) {
        unsetUsers();
      } else {
        setUsers((Set<String>)value);
      }
      break;

    }
  }

  /** Reflective getter used by thrift runtime (primitives are boxed). */
  public Object getFieldValue(_Fields field) {
    switch (field) {
    case PROTOCOL_VERSION:
      return getProtocol_version();

    case REQUESTOR_USER_NAME:
      return getRequestorUserName();

    case AUTHORIZABLE_SET:
      return getAuthorizableSet();

    case GROUPS:
      return getGroups();

    case ROLE_SET:
      return getRoleSet();

    case USERS:
      return getUsers();

    }
    throw new IllegalStateException();
  }

  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }

    switch (field) {
    case PROTOCOL_VERSION:
      return isSetProtocol_version();
    case REQUESTOR_USER_NAME:
      return isSetRequestorUserName();
    case AUTHORIZABLE_SET:
      return isSetAuthorizableSet();
    case GROUPS:
      return isSetGroups();
    case ROLE_SET:
      return isSetRoleSet();
    case USERS:
      return isSetUsers();
    }
    throw new IllegalStateException();
  }

  @Override
  public boolean equals(Object that) {
    if (that == null)
      return false;
    if (that instanceof TListSentryPrivilegesByAuthRequest)
      return this.equals((TListSentryPrivilegesByAuthRequest)that);
    return false;
  }

  /**
   * Field-by-field equality: each field is equal when both sides have it
   * set to equal values, or both have it unset.
   */
  public boolean equals(TListSentryPrivilegesByAuthRequest that) {
    if (that == null)
      return false;

    boolean this_present_protocol_version = true;
    boolean that_present_protocol_version = true;
    if (this_present_protocol_version || that_present_protocol_version) {
      if (!(this_present_protocol_version && that_present_protocol_version))
        return false;
      if (this.protocol_version != that.protocol_version)
        return false;
    }

    boolean this_present_requestorUserName = true && this.isSetRequestorUserName();
    boolean that_present_requestorUserName = true && that.isSetRequestorUserName();
    if (this_present_requestorUserName || that_present_requestorUserName) {
      if (!(this_present_requestorUserName && that_present_requestorUserName))
        return false;
      if (!this.requestorUserName.equals(that.requestorUserName))
        return false;
    }

    boolean this_present_authorizableSet = true && this.isSetAuthorizableSet();
    boolean that_present_authorizableSet = true && that.isSetAuthorizableSet();
    if (this_present_authorizableSet || that_present_authorizableSet) {
      if (!(this_present_authorizableSet && that_present_authorizableSet))
        return false;
      if (!this.authorizableSet.equals(that.authorizableSet))
        return false;
    }

    boolean this_present_groups = true && this.isSetGroups();
    boolean that_present_groups = true && that.isSetGroups();
    if (this_present_groups || that_present_groups) {
      if (!(this_present_groups && that_present_groups))
        return false;
      if (!this.groups.equals(that.groups))
        return false;
    }

    boolean this_present_roleSet = true && this.isSetRoleSet();
    boolean that_present_roleSet = true && that.isSetRoleSet();
    if (this_present_roleSet || that_present_roleSet) {
      if (!(this_present_roleSet && that_present_roleSet))
        return false;
      if (!this.roleSet.equals(that.roleSet))
        return false;
    }

    boolean this_present_users = true && this.isSetUsers();
    boolean that_present_users = true && that.isSetUsers();
    if (this_present_users || that_present_users) {
      if (!(this_present_users && that_present_users))
        return false;
      if (!this.users.equals(that.users))
        return false;
    }

    return true;
  }

  /** Hash over (presence flag, value) pairs — consistent with equals above. */
  @Override
  public int hashCode() {
    List<Object> list = new ArrayList<Object>();

    boolean present_protocol_version = true;
    list.add(present_protocol_version);
    if (present_protocol_version)
      list.add(protocol_version);

    boolean present_requestorUserName = true && (isSetRequestorUserName());
    list.add(present_requestorUserName);
    if (present_requestorUserName)
      list.add(requestorUserName);

    boolean present_authorizableSet = true && (isSetAuthorizableSet());
    list.add(present_authorizableSet);
    if (present_authorizableSet)
      list.add(authorizableSet);

    boolean present_groups = true && (isSetGroups());
    list.add(present_groups);
    if (present_groups)
      list.add(groups);

    boolean present_roleSet = true && (isSetRoleSet());
    list.add(present_roleSet);
    if (present_roleSet)
      list.add(roleSet);

    boolean present_users = true && (isSetUsers());
    list.add(present_users);
    if (present_users)
      list.add(users);

    return list.hashCode();
  }

  /**
   * Orders by field id; for each field, unset sorts before set, then set
   * values are compared via TBaseHelper.
   */
  @Override
  public int compareTo(TListSentryPrivilegesByAuthRequest other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }

    int lastComparison = 0;

    lastComparison = Boolean.valueOf(isSetProtocol_version()).compareTo(other.isSetProtocol_version());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetProtocol_version()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.protocol_version, other.protocol_version);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(isSetRequestorUserName()).compareTo(other.isSetRequestorUserName());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetRequestorUserName()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.requestorUserName, other.requestorUserName);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(isSetAuthorizableSet()).compareTo(other.isSetAuthorizableSet());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetAuthorizableSet()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.authorizableSet, other.authorizableSet);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(isSetGroups()).compareTo(other.isSetGroups());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetGroups()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.groups, other.groups);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(isSetRoleSet()).compareTo(other.isSetRoleSet());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetRoleSet()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.roleSet, other.roleSet);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(isSetUsers()).compareTo(other.isSetUsers());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetUsers()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.users, other.users);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }

  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }

  // Serialization entry points: delegate to the scheme negotiated with
  // the protocol (standard or tuple).
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }

  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("TListSentryPrivilegesByAuthRequest(");
    boolean first = true;

    // Required fields are always printed; optional fields only when set.
    sb.append("protocol_version:");
    sb.append(this.protocol_version);
    first = false;
    if (!first) sb.append(", ");
    sb.append("requestorUserName:");
    if (this.requestorUserName == null) {
      sb.append("null");
    } else {
      sb.append(this.requestorUserName);
    }
    first = false;
    if (!first) sb.append(", ");
    sb.append("authorizableSet:");
    if (this.authorizableSet == null) {
      sb.append("null");
    } else {
      sb.append(this.authorizableSet);
    }
    first = false;
    if (isSetGroups()) {
      if (!first) sb.append(", ");
      sb.append("groups:");
      if (this.groups == null) {
        sb.append("null");
      } else {
        sb.append(this.groups);
      }
      first = false;
    }
    if (isSetRoleSet()) {
      if (!first) sb.append(", ");
      sb.append("roleSet:");
      if (this.roleSet == null) {
        sb.append("null");
      } else {
        sb.append(this.roleSet);
      }
      first = false;
    }
    if (isSetUsers()) {
      if (!first) sb.append(", ");
      sb.append("users:");
      if (this.users == null) {
        sb.append("null");
      } else {
        sb.append(this.users);
      }
      first = false;
    }
    sb.append(")");
    return sb.toString();
  }

  /** Throws TProtocolException if any required field is unset. */
  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    if (!isSetProtocol_version()) {
      throw new org.apache.thrift.protocol.TProtocolException("Required field 'protocol_version' is unset! Struct:" + toString());
    }

    if (!isSetRequestorUserName()) {
      throw new org.apache.thrift.protocol.TProtocolException("Required field 'requestorUserName' is unset! Struct:" + toString());
    }

    if (!isSetAuthorizableSet()) {
      throw new org.apache.thrift.protocol.TProtocolException("Required field 'authorizableSet' is unset! 
Struct:" + toString()); } // check for sub-struct validity if (roleSet != null) { roleSet.validate(); } } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException { try { // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor. __isset_bitfield = 0; read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class TListSentryPrivilegesByAuthRequestStandardSchemeFactory implements SchemeFactory { public TListSentryPrivilegesByAuthRequestStandardScheme getScheme() { return new TListSentryPrivilegesByAuthRequestStandardScheme(); } } private static class TListSentryPrivilegesByAuthRequestStandardScheme extends StandardScheme<TListSentryPrivilegesByAuthRequest> { public void read(org.apache.thrift.protocol.TProtocol iprot, TListSentryPrivilegesByAuthRequest struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 1: // PROTOCOL_VERSION if (schemeField.type == org.apache.thrift.protocol.TType.I32) { struct.protocol_version = iprot.readI32(); struct.setProtocol_versionIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 2: // REQUESTOR_USER_NAME if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.requestorUserName = iprot.readString(); 
struct.setRequestorUserNameIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 3: // AUTHORIZABLE_SET if (schemeField.type == org.apache.thrift.protocol.TType.SET) { { org.apache.thrift.protocol.TSet _set130 = iprot.readSetBegin(); struct.authorizableSet = new HashSet<TSentryAuthorizable>(2*_set130.size); TSentryAuthorizable _elem131; for (int _i132 = 0; _i132 < _set130.size; ++_i132) { _elem131 = new TSentryAuthorizable(); _elem131.read(iprot); struct.authorizableSet.add(_elem131); } iprot.readSetEnd(); } struct.setAuthorizableSetIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 4: // GROUPS if (schemeField.type == org.apache.thrift.protocol.TType.SET) { { org.apache.thrift.protocol.TSet _set133 = iprot.readSetBegin(); struct.groups = new HashSet<String>(2*_set133.size); String _elem134; for (int _i135 = 0; _i135 < _set133.size; ++_i135) { _elem134 = iprot.readString(); struct.groups.add(_elem134); } iprot.readSetEnd(); } struct.setGroupsIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 5: // ROLE_SET if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { struct.roleSet = new TSentryActiveRoleSet(); struct.roleSet.read(iprot); struct.setRoleSetIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 6: // USERS if (schemeField.type == org.apache.thrift.protocol.TType.SET) { { org.apache.thrift.protocol.TSet _set136 = iprot.readSetBegin(); struct.users = new HashSet<String>(2*_set136.size); String _elem137; for (int _i138 = 0; _i138 < _set136.size; ++_i138) { _elem137 = iprot.readString(); struct.users.add(_elem137); } iprot.readSetEnd(); } struct.setUsersIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, 
schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); struct.validate(); } public void write(org.apache.thrift.protocol.TProtocol oprot, TListSentryPrivilegesByAuthRequest struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); oprot.writeFieldBegin(PROTOCOL_VERSION_FIELD_DESC); oprot.writeI32(struct.protocol_version); oprot.writeFieldEnd(); if (struct.requestorUserName != null) { oprot.writeFieldBegin(REQUESTOR_USER_NAME_FIELD_DESC); oprot.writeString(struct.requestorUserName); oprot.writeFieldEnd(); } if (struct.authorizableSet != null) { oprot.writeFieldBegin(AUTHORIZABLE_SET_FIELD_DESC); { oprot.writeSetBegin(new org.apache.thrift.protocol.TSet(org.apache.thrift.protocol.TType.STRUCT, struct.authorizableSet.size())); for (TSentryAuthorizable _iter139 : struct.authorizableSet) { _iter139.write(oprot); } oprot.writeSetEnd(); } oprot.writeFieldEnd(); } if (struct.groups != null) { if (struct.isSetGroups()) { oprot.writeFieldBegin(GROUPS_FIELD_DESC); { oprot.writeSetBegin(new org.apache.thrift.protocol.TSet(org.apache.thrift.protocol.TType.STRING, struct.groups.size())); for (String _iter140 : struct.groups) { oprot.writeString(_iter140); } oprot.writeSetEnd(); } oprot.writeFieldEnd(); } } if (struct.roleSet != null) { if (struct.isSetRoleSet()) { oprot.writeFieldBegin(ROLE_SET_FIELD_DESC); struct.roleSet.write(oprot); oprot.writeFieldEnd(); } } if (struct.users != null) { if (struct.isSetUsers()) { oprot.writeFieldBegin(USERS_FIELD_DESC); { oprot.writeSetBegin(new org.apache.thrift.protocol.TSet(org.apache.thrift.protocol.TType.STRING, struct.users.size())); for (String _iter141 : struct.users) { oprot.writeString(_iter141); } oprot.writeSetEnd(); } oprot.writeFieldEnd(); } } oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class TListSentryPrivilegesByAuthRequestTupleSchemeFactory implements SchemeFactory { public TListSentryPrivilegesByAuthRequestTupleScheme getScheme() { return new 
TListSentryPrivilegesByAuthRequestTupleScheme(); } } private static class TListSentryPrivilegesByAuthRequestTupleScheme extends TupleScheme<TListSentryPrivilegesByAuthRequest> { @Override public void write(org.apache.thrift.protocol.TProtocol prot, TListSentryPrivilegesByAuthRequest struct) throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; oprot.writeI32(struct.protocol_version); oprot.writeString(struct.requestorUserName); { oprot.writeI32(struct.authorizableSet.size()); for (TSentryAuthorizable _iter142 : struct.authorizableSet) { _iter142.write(oprot); } } BitSet optionals = new BitSet(); if (struct.isSetGroups()) { optionals.set(0); } if (struct.isSetRoleSet()) { optionals.set(1); } if (struct.isSetUsers()) { optionals.set(2); } oprot.writeBitSet(optionals, 3); if (struct.isSetGroups()) { { oprot.writeI32(struct.groups.size()); for (String _iter143 : struct.groups) { oprot.writeString(_iter143); } } } if (struct.isSetRoleSet()) { struct.roleSet.write(oprot); } if (struct.isSetUsers()) { { oprot.writeI32(struct.users.size()); for (String _iter144 : struct.users) { oprot.writeString(_iter144); } } } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, TListSentryPrivilegesByAuthRequest struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; struct.protocol_version = iprot.readI32(); struct.setProtocol_versionIsSet(true); struct.requestorUserName = iprot.readString(); struct.setRequestorUserNameIsSet(true); { org.apache.thrift.protocol.TSet _set145 = new org.apache.thrift.protocol.TSet(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32()); struct.authorizableSet = new HashSet<TSentryAuthorizable>(2*_set145.size); TSentryAuthorizable _elem146; for (int _i147 = 0; _i147 < _set145.size; ++_i147) { _elem146 = new TSentryAuthorizable(); _elem146.read(iprot); struct.authorizableSet.add(_elem146); } } struct.setAuthorizableSetIsSet(true); BitSet incoming = iprot.readBitSet(3); if 
(incoming.get(0)) { { org.apache.thrift.protocol.TSet _set148 = new org.apache.thrift.protocol.TSet(org.apache.thrift.protocol.TType.STRING, iprot.readI32()); struct.groups = new HashSet<String>(2*_set148.size); String _elem149; for (int _i150 = 0; _i150 < _set148.size; ++_i150) { _elem149 = iprot.readString(); struct.groups.add(_elem149); } } struct.setGroupsIsSet(true); } if (incoming.get(1)) { struct.roleSet = new TSentryActiveRoleSet(); struct.roleSet.read(iprot); struct.setRoleSetIsSet(true); } if (incoming.get(2)) { { org.apache.thrift.protocol.TSet _set151 = new org.apache.thrift.protocol.TSet(org.apache.thrift.protocol.TType.STRING, iprot.readI32()); struct.users = new HashSet<String>(2*_set151.size); String _elem152; for (int _i153 = 0; _i153 < _set151.size; ++_i153) { _elem152 = iprot.readString(); struct.users.add(_elem152); } } struct.setUsersIsSet(true); } } } }
apache/iotdb
36,864
iotdb-core/node-commons/src/main/java/org/apache/iotdb/commons/schema/tree/AbstractTreeVisitor.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.iotdb.commons.schema.tree; import org.apache.iotdb.commons.conf.IoTDBConstant; import org.apache.iotdb.commons.path.PartialPath; import org.apache.iotdb.commons.path.PathPatternTree; import org.apache.iotdb.commons.path.PathPatternUtil; import org.apache.iotdb.commons.path.fa.IFAState; import org.apache.iotdb.commons.path.fa.IFATransition; import org.apache.iotdb.commons.path.fa.IPatternFA; import org.apache.iotdb.commons.path.fa.dfa.PatternDFA; import org.apache.iotdb.commons.path.fa.match.IStateMatchInfo; import org.apache.iotdb.commons.path.fa.match.StateMultiMatchInfo; import org.apache.iotdb.commons.path.fa.match.StateSingleMatchInfo; import org.apache.iotdb.commons.schema.SchemaConstant; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Deque; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.NoSuchElementException; /** * This class defines a dfs-based algorithm of tree-traversing with path pattern match, and support * iterating each element of the result. 
 *
 * <p>This class takes three basic parameters as input:
 *
 * <ol>
 *   <li>N root: the root node of the tree to be traversed.
 *   <li>PartialPath pathPattern: the pattern of path that the path of target element matches
 *   <li>boolean isPrefixMatch: whether the pathPattern is used for matching the prefix; if so, all
 *       elements with path starting with the matched prefix will be collected
 * </ol>
 *
 * <p>If any tree wants to integrate and use this class. The following steps must be attained:
 *
 * <ol>
 *   <li>The node of the tree must implement ITreeNode interface and the generic N should be defined
 *       as the node class.
 *   <li>The result type R should be defined.
 *   <li>Implement the abstract methods, and for the concrete requirements, please refer to the
 *       javadoc of specific method.
 * </ol>
 *
 * @param <N> The node consisting the tree.
 * @param <R> The result extracted from the tree.
 */
public abstract class AbstractTreeVisitor<N extends ITreeNode, R> implements SchemaIterator<R> {
  private static final Logger logger = LoggerFactory.getLogger(AbstractTreeVisitor.class);

  // command parameters
  protected N root;
  // finite automation constructed from given path pattern or pattern tree
  protected final IPatternFA patternFA;
  // deterministic finite automation for filtering traversed subtrees
  private final PatternDFA scopeDFA;
  // true when no scope was given, i.e. scopeDFA is the shared ALL_MATCH_DFA
  private final boolean allScope;

  // run time variables
  // stack to store children iterator of visited ancestor
  private final Deque<VisitorStackEntry> visitorStack = new ArrayDeque<>();
  // stack to store ancestor nodes and their FA state match info
  private final List<AncestorStackEntry> ancestorStack = new ArrayList<>();
  // the FA match process can traceback since this ancestor in ancestor stack
  // this field will be updated during iterating children in all subclass of
  // AbstractChildrenIterator
  private int firstAncestorOfTraceback = -1;
  // the FA state match info of current node
  // this field will be updated during iterating children in all subclass of
  // AbstractChildrenIterator
  private IStateMatchInfo currentStateMatchInfo;
  // whether to visit the subtree of current node
  private boolean shouldVisitSubtree;
  // record exception if failed
  private Throwable throwable;

  // cached result variables
  private N nextMatchedNode;

  // only used for wrapper
  protected AbstractTreeVisitor() {
    root = null;
    patternFA = null;
    scopeDFA = SchemaConstant.ALL_MATCH_DFA;
    allScope = true;
  }

  protected AbstractTreeVisitor(N root, PartialPath pathPattern, boolean isPrefixMatch) {
    this(root, pathPattern, isPrefixMatch, null);
  }

  /**
   * Builds the pattern automaton from a single path pattern. A DFA is only used when
   * the pattern contains ** and no other regex-bearing node; otherwise an NFA is built.
   */
  protected AbstractTreeVisitor(
      N root, PartialPath pathPattern, boolean isPrefixMatch, PathPatternTree scope) {
    this.root = root;
    boolean usingDFA = false;
    // Use DFA if there are ** and no other regex nodes in pathPattern
    for (String pathNode : pathPattern.getNodes()) {
      if (pathNode == null) {
        continue;
      } else if (IoTDBConstant.MULTI_LEVEL_PATH_WILDCARD.equals(pathNode)) {
        // ** node
        usingDFA = true;
      } else if (pathNode.length() > 1 && PathPatternUtil.hasWildcard(pathNode)) {
        // regex node
        usingDFA = false;
        break;
      }
    }
    this.patternFA =
        usingDFA
            ? new IPatternFA.Builder().pattern(pathPattern).isPrefixMatch(isPrefixMatch).buildDFA()
            : new IPatternFA.Builder().pattern(pathPattern).isPrefixMatch(isPrefixMatch).buildNFA();
    this.scopeDFA =
        scope == null
            ? SchemaConstant.ALL_MATCH_DFA
            : (PatternDFA) new IPatternFA.Builder().patternTree(scope).buildDFA();
    this.allScope = this.scopeDFA == SchemaConstant.ALL_MATCH_DFA;
  }

  // Notices: PatternTree must not contain any wildcard
  protected AbstractTreeVisitor(N root, PathPatternTree fullPathTree, PathPatternTree scope) {
    this.root = root;
    this.patternFA = new IPatternFA.Builder().patternTree(fullPathTree).buildDFA();
    this.scopeDFA =
        scope == null
            ? SchemaConstant.ALL_MATCH_DFA
            : (PatternDFA) new IPatternFA.Builder().patternTree(scope).buildDFA();
    this.allScope = this.scopeDFA == SchemaConstant.ALL_MATCH_DFA;
  }

  /** This method must be invoked before iteration */
  protected final void initStack() {
    IFAState initialState = patternFA.getInitialState();
    // Root is matched only via a precise transition on its name; no transition
    // means nothing in the tree can match the pattern.
    IFATransition transition =
        patternFA.getPreciseMatchTransition(initialState).get(root.getName());
    if (transition == null) {
      // the visitor stack will be empty and the result of hasNext() will be false
      return;
    }
    IFAState rootState = patternFA.getNextState(initialState, transition);
    IFAState initScopeState = scopeDFA.getNextState(scopeDFA.getInitialState(), root.getName());
    currentStateMatchInfo = new StateSingleMatchInfo(patternFA, rootState, initScopeState);
    visitorStack.push(new VisitorStackEntry(createChildrenIterator(root), 1));
    ancestorStack.add(new AncestorStackEntry(root, currentStateMatchInfo));
  }

  /** Releases all held nodes and restarts the traversal from the root. */
  public void reset() {
    close();
    visitorStack.clear();
    ancestorStack.clear();
    nextMatchedNode = null;
    firstAncestorOfTraceback = -1;
    initStack();
  }

  @Override
  public void close() {
    if (nextMatchedNode != null && !shouldVisitSubtree) {
      // release nextMatchedNode
      releaseNode(nextMatchedNode);
    }
    while (!visitorStack.isEmpty()) {
      popStack();
    }
  }

  // Lazily advances to the next match; any failure is recorded via setFailure()
  // and surfaces as hasNext() == false rather than a thrown exception.
  @Override
  public boolean hasNext() {
    if (throwable == null && nextMatchedNode == null) {
      try {
        getNext();
      } catch (Throwable e) {
        logger.warn(e.getMessage(), e);
        setFailure(e);
      }
    }
    return throwable == null && nextMatchedNode != null;
  }

  @Override
  public R next() {
    if (!hasNext()) {
      throw new NoSuchElementException();
    }
    R result = generateResult(nextMatchedNode);
    if (!shouldVisitSubtree) {
      // release nextMatchedNode
      releaseNode(nextMatchedNode);
    }
    nextMatchedNode = null;
    return result;
  }

  // Core DFS loop: pops exhausted iterators, classifies each child as
  // full/internal match, and decides acceptance and subtree descent via the
  // subclass hooks.
  private void getNext() {
    nextMatchedNode = null;
    VisitorStackEntry stackEntry;
    AbstractChildrenIterator iterator;
    while (!visitorStack.isEmpty()) {
      stackEntry = visitorStack.peek();
      iterator = stackEntry.iterator;

      if (!iterator.hasNext()) {
        popStack();
        continue;
      }
      N nextTempNode = iterator.next();
      shouldVisitSubtree = false;
      if (currentStateMatchInfo.hasFinalState()) {
        if (acceptFullMatchedNode(nextTempNode)) {
          nextMatchedNode = nextTempNode;
        }
        shouldVisitSubtree = shouldVisitSubtreeOfFullMatchedNode(nextTempNode);
      } else {
        if (acceptInternalMatchedNode(nextTempNode)) {
          nextMatchedNode = nextTempNode;
        }
        shouldVisitSubtree = shouldVisitSubtreeOfInternalMatchedNode(nextTempNode);
      }
      if (shouldVisitSubtree) {
        pushChildren(nextTempNode);
        // After adding nextTempNode into ancestorStack, nextTempNode will be released finally.
      } else if (nextMatchedNode != nextTempNode) {
        // Else if nextTempNode is not accepted, it needs to be released.
        releaseNode(nextTempNode);
      }
      // Otherwise, it will be released when invoking next()
      if (nextMatchedNode != null) {
        return;
      }
    }
  }

  private void pushChildren(N parent) {
    visitorStack.push(
        new VisitorStackEntry(
            createChildrenIterator(parent),
            visitorStack.isEmpty() ? 1 : visitorStack.peek().level + 1));
    ancestorStack.add(new AncestorStackEntry(parent, currentStateMatchInfo));
  }

  // Chooses the cheapest iterator strategy for the current FA match situation.
  private AbstractChildrenIterator createChildrenIterator(N parent) {
    if (firstAncestorOfTraceback > -1) {
      // there may be traceback when try to find the matched state of node
      return new TraceBackChildrenIterator(parent, currentStateMatchInfo);
    } else if (currentStateMatchInfo.hasOnlyPreciseMatchTransition()) {
      // the child can be got directly with the precise value of transition
      return new PreciseMatchChildrenIterator(parent, currentStateMatchInfo);
    } else if (currentStateMatchInfo.hasNoPreciseMatchTransition()
        && currentStateMatchInfo.isSingleFuzzyMatchTransition()) {
      // only one transition which may match batch children, need to iterate and check all child
      return new SingleFuzzyMatchChildrenIterator(parent, currentStateMatchInfo);
    } else {
      // child may be matched by multi transitions, precise match or fuzzy match,
      // which results in one child match multi state; need to iterate and check all child
      return new MultiMatchTransitionChildrenIterator(parent, currentStateMatchInfo);
    }
  }

  private void popStack() {
    VisitorStackEntry stackEntry = visitorStack.pop();
    stackEntry.iterator.close();
    // The ancestor pop operation with level check supports the children of one node pushed by
    // batch.
    if (!visitorStack.isEmpty() && visitorStack.peek().level < ancestorStack.size()) {
      AncestorStackEntry ancestorStackEntry = ancestorStack.remove(ancestorStack.size() - 1);
      releaseNode(ancestorStackEntry.node);
      if (ancestorStack.size() <= firstAncestorOfTraceback) {
        firstAncestorOfTraceback = -1;
      }
    }
  }

  /**
   * Get full path of parent of current node. This method should be used in {@linkplain
   * AbstractTreeVisitor#acceptInternalMatchedNode}, {@linkplain
   * AbstractTreeVisitor#acceptFullMatchedNode},{@linkplain
   * AbstractTreeVisitor#shouldVisitSubtreeOfInternalMatchedNode} or {@linkplain
   * AbstractTreeVisitor#shouldVisitSubtreeOfFullMatchedNode}.
   *
   * @return full path from traverse start node to the parent of current node
   */
  protected PartialPath getParentPartialPath() {
    List<String> nodeNames = new ArrayList<>();
    Iterator<AncestorStackEntry> iterator = ancestorStack.iterator();
    // When the current node was pushed (shouldVisitSubtree), it is itself the
    // last ancestor entry and must be excluded from its parent's path.
    for (int i = 0, size = shouldVisitSubtree ? ancestorStack.size() - 1 : ancestorStack.size();
        i < size;
        i++) {
      if (iterator.hasNext()) {
        nodeNames.add(iterator.next().node.getName());
      }
    }
    return new PartialPath(nodeNames.toArray(new String[0]));
  }

  /**
   * Get partial path from root to node.
   *
   * @param node node must be concluded in ancestorStack or nextMatchedNode
   * @return partial path from traverse start node to the specified node
   */
  protected final PartialPath getPartialPathFromRootToNode(N node) {
    return new PartialPath(getFullPathFromRootToNode(node));
  }

  /**
   * Get full path from root to node.
   *
   * @param node node must be concluded in ancestorStack or nextMatchedNode
   * @return full path from traverse start node to the specified node
   */
  protected final String[] getFullPathFromRootToNode(N node) {
    List<String> nodeNames = new ArrayList<>();
    for (AncestorStackEntry entry : ancestorStack) {
      nodeNames.add(entry.node.getName());
      if (entry.node == node) {
        return nodeNames.toArray(new String[0]);
      }
    }
    // node was not on the ancestor stack: it is the (not yet pushed) matched node
    nodeNames.add(node.getName());
    return nodeNames.toArray(new String[0]);
  }

  protected final N getAncestorNodeByLevel(int level) {
    return ancestorStack.get(level).node;
  }

  protected final N getParentOfNextMatchedNode() {
    if (shouldVisitSubtree) {
      // the matched node itself is the last ancestor entry
      return ancestorStack.get(ancestorStack.size() - 2).node;
    } else {
      return ancestorStack.get(ancestorStack.size() - 1).node;
    }
  }

  /**
   * Get level from root to NextMatchedNode. Level of root is 0. For example, root.sg.d1.s1,
   * NextMatchedNode is s1, then return 3.
   *
   * @return level from root to NextMatchedNode
   */
  protected final int getLevelOfNextMatchedNode() {
    if (shouldVisitSubtree) {
      return ancestorStack.size() - 1;
    } else {
      return ancestorStack.size();
    }
  }

  protected final int getSizeOfAncestor() {
    return ancestorStack.size();
  }

  protected void setFailure(Throwable e) {
    this.throwable = e;
  }

  public Throwable getFailure() {
    return throwable;
  }

  public boolean isSuccess() {
    return throwable == null;
  }

  // Get a child with the given childName.
  protected abstract N getChild(N parent, String childName) throws Exception;

  // Get an iterator of all children.
  protected abstract Iterator<N> getChildrenIterator(N parent) throws Exception;

  // Get an iterator of specific children.
  protected abstract Iterator<N> getChildrenIterator(N parent, Iterator<String> childrenName)
      throws Exception;

  /**
   * Get current children iterator generated by {@link AbstractTreeVisitor#getChildrenIterator}
   *
   * @return null if there is no current children iterator
   */
  protected Iterator<N> getCurrentChildrenIterator() {
    if (visitorStack.isEmpty()) {
      return null;
    } else {
      return visitorStack.peek().iterator.getIterator();
    }
  }

  // Release a child node.
  protected void releaseNode(N node) {}

  // Release an iterator. It is not necessary to deal with all the elements in the iterator.
  // Only the elements that have been fetched but not returned by next() need to be released.
  protected void releaseNodeIterator(Iterator<N> nodeIterator) {}

  /**
   * Internal-match means the node matches an internal node name of the given path pattern. root.sg
   * internal match root.sg.**(pattern). This method should be implemented according to concrete
   * tasks.
   *
   * <p>Return whether the subtree of given node should be processed. If return true, the traversing
   * process will keep traversing the subtree. If return false, the traversing process will skip the
   * subtree of given node.
   */
  protected abstract boolean shouldVisitSubtreeOfInternalMatchedNode(N node);

  /**
   * Full-match means the node matches the last node name of the given path pattern. root.sg.d full
   * match root.sg.**(pattern) This method should be implemented according to concrete tasks.
   *
   * <p>Return whether the subtree of given node should be processed. If return {@code true}, the
   * traversing process will keep traversing the subtree. If return {@code false}, the traversing
   * process will skip the subtree of given node.
   */
  protected abstract boolean shouldVisitSubtreeOfFullMatchedNode(final N node);

  /** Only accepted nodes will be considered for hasNext() and next() */
  protected abstract boolean acceptInternalMatchedNode(N node);

  /** Only accepted nodes will be considered for hasNext() and next() */
  protected abstract boolean acceptFullMatchedNode(N node);

  /** The method used for generating the result based on the matched node. */
  protected abstract R generateResult(N nextMatchedNode);

  private class VisitorStackEntry {
    // children iterator
    private final AbstractChildrenIterator iterator;
    // level of children taken from iterator, start from 1
    private final int level;

    VisitorStackEntry(AbstractChildrenIterator iterator, int level) {
      this.iterator = iterator;
      this.level = level;
    }
  }

  private class AncestorStackEntry {
    private final N node;
    private final IStateMatchInfo stateMatchInfo;

    AncestorStackEntry(N node, IStateMatchInfo stateMatchInfo) {
      this.node = node;
      this.stateMatchInfo = stateMatchInfo;
    }
  }

  // Advances the scope DFA by node name, falling back to the node alias when the
  // name has no transition.
  protected final IFAState getNextMatchedScopeState(IFAState currentState, N node) {
    IFAState nextState = scopeDFA.getNextState(currentState, node.getName());
    if (nextState == null && node.getAlias() != null) {
      return scopeDFA.getNextState(currentState, node.getAlias());
    }
    return nextState;
  }

  // implement common iterating logic of different children iterator
  private abstract class AbstractChildrenIterator implements Iterator<N> {
    protected final N parent;
    protected final IFAState currentScopeState;
    // one-element lookahead buffer filled by getNext() via saveResult()
    private N nextMatchedChild;

    protected AbstractChildrenIterator(N parent, IFAState currentScopeState) {
      this.parent = parent;
      this.currentScopeState = currentScopeState;
    }

    @Override
    public boolean hasNext() {
      if (nextMatchedChild == null) {
        try {
          getNext();
        } catch (Throwable e) {
          logger.warn(e.getMessage(), e);
          throw new RuntimeException(e.getMessage(), e);
        }
      }
      return nextMatchedChild != null;
    }

    @Override
    public N next() {
      if (!hasNext()) {
        throw new NoSuchElementException();
      }
      N result = nextMatchedChild;
      nextMatchedChild = null;
      return result;
    }

    // Publishes the matched child and, as a side effect, updates the visitor's
    // currentStateMatchInfo for the outer getNext() loop.
    protected final void saveResult(N child, IStateMatchInfo stateMatchInfo) {
      nextMatchedChild = child;
      currentStateMatchInfo = stateMatchInfo;
    }

    protected abstract void getNext() throws Exception;

    // When the scope DFA has only precise transitions, only fetch the named
    // children instead of iterating all of them.
    protected final Iterator<N> initChildrenIterator() throws Exception {
      if (!allScope && scopeDFA.getFuzzyMatchTransitionSize(currentScopeState) == 0) {
        return getChildrenIterator(
            parent, scopeDFA.getPreciseMatchTransition(currentScopeState).keySet().iterator());
      } else {
        return getChildrenIterator(parent);
      }
    }

    public abstract Iterator<N> getIterator();

    protected void close() {
      if (nextMatchedChild != null) {
        releaseNode(nextMatchedChild);
      }
    }
  }

  // the child can be got directly with the precise value of transition, there's no traceback
  private class PreciseMatchChildrenIterator extends AbstractChildrenIterator {
    private final IFAState sourceState;
    private final Iterator<IFATransition> transitionIterator;

    private PreciseMatchChildrenIterator(N parent, IStateMatchInfo stateMatchInfo) {
      super(parent, stateMatchInfo.getScopeMatchedState());
      this.sourceState = stateMatchInfo.getOneMatchedState();
      transitionIterator = patternFA.getPreciseMatchTransitionIterator(sourceState);
    }

    @Override
    protected void getNext() throws Exception {
      IFATransition transition;
      while (transitionIterator.hasNext()) {
        transition = transitionIterator.next();
        N child = getChild(parent, transition.getAcceptEvent());
        if (child == null) {
          continue;
        }
        IFAState nextScopeState =
            allScope ? null : getNextMatchedScopeState(currentScopeState, child);
        if (!allScope && nextScopeState == null) {
          // out of scope
          releaseNode(child);
          continue;
        }
        saveResult(
            child,
            new StateSingleMatchInfo(
                patternFA, patternFA.getNextState(sourceState, transition), nextScopeState));
        return;
      }
    }

    @Override
    public Iterator<N> getIterator() {
      return null;
    }
  }

  // only one fuzzy transition which may match batch children, need to iterate and check all
  // children,
  // there's no traceback
  private class SingleFuzzyMatchChildrenIterator extends AbstractChildrenIterator {
    private final IFAState sourceState;
    private final IFATransition transition;
    private Iterator<N> childrenIterator;

    private SingleFuzzyMatchChildrenIterator(N parent, IStateMatchInfo stateMatchInfo) {
      super(parent, stateMatchInfo.getScopeMatchedState());
      this.sourceState = stateMatchInfo.getOneMatchedState();
      this.transition = patternFA.getFuzzyMatchTransitionIterator(sourceState).next();
    }

    @Override
    protected void getNext() throws Exception {
      if (childrenIterator == null) {
        this.childrenIterator = initChildrenIterator();
      }
      N child;
      while (childrenIterator.hasNext()) {
        child = childrenIterator.next();
        if (tryGetNextState(child, sourceState, transition) == null) {
          releaseNode(child);
          continue;
        }
        IFAState nextScopeState =
            allScope ? null : getNextMatchedScopeState(currentScopeState, child);
        // NOTE(review): unlike PreciseMatchChildrenIterator, a null nextScopeState is
        // not filtered out here — confirm whether out-of-scope children are meant to
        // be rejected later, or whether this is an intentional difference.
        saveResult(
            child,
            new StateSingleMatchInfo(
                patternFA, patternFA.getNextState(sourceState, transition), nextScopeState));
        return;
      }
    }

    @Override
    public Iterator<N> getIterator() {
      return childrenIterator;
    }

    @Override
    protected void close() {
      super.close();
      if (childrenIterator != null) {
        releaseNodeIterator(childrenIterator);
      }
    }
  }

  // child may be matched by multi transitions, precise match or fuzzy match,
  // which results in one child match multi state; need to iterate and check all child.
  // the iterating process will try to get the first matched state of a child, and if there are some
  // rest transitions, there may be traceback when checking the descendents
  private class MultiMatchTransitionChildrenIterator extends AbstractChildrenIterator {
    private final IFAState sourceState;
    private final Map<String, IFATransition> preciseMatchTransitionMap;
    private Iterator<N> iterator;

    private MultiMatchTransitionChildrenIterator(N parent, IStateMatchInfo stateMatchInfo) {
      super(parent, stateMatchInfo.getScopeMatchedState());
      this.sourceState = stateMatchInfo.getOneMatchedState();
      this.preciseMatchTransitionMap = patternFA.getPreciseMatchTransition(sourceState);
    }

    @Override
    protected void getNext() throws Exception {
      if (iterator == null) {
        this.iterator = initChildrenIterator();
      }
      N child;
      IFAState matchedState = null;
      Iterator<IFATransition> transitionIterator;
      IStateMatchInfo stateMatchInfo;
      while (iterator.hasNext()) {
        child = iterator.next();
        IFAState nextScopeState =
            allScope ? null : getNextMatchedScopeState(currentScopeState, child);
        // find first matched state
        if (!preciseMatchTransitionMap.isEmpty()) {
          matchedState = tryGetNextState(child, sourceState, preciseMatchTransitionMap);
        }
        transitionIterator = patternFA.getFuzzyMatchTransitionIterator(sourceState);
        if (matchedState == null) {
          while (transitionIterator.hasNext()) {
            matchedState = tryGetNextState(child, sourceState, transitionIterator.next());
            if (matchedState != null) {
              break;
            }
          }
          if (matchedState == null) {
            // no transition matches this child at all
            releaseNode(child);
            continue;
          }
        }
        // check whether accept the first matched state
        if (mayTargetNodeType(child) && !matchedState.isFinal()) {
          // not accept the first matched state since this node may be a target result, check the
          // other states
          if (patternFA.mayTransitionOverlap() && transitionIterator.hasNext()) {
            stateMatchInfo =
                new StateMultiMatchInfo(
                    patternFA, matchedState, transitionIterator, nextScopeState);
            firstAncestorOfTraceback = ancestorStack.size();
            // keep collecting states until a final state is found or transitions run out
            while (transitionIterator.hasNext()) {
              matchedState = tryGetNextState(child, sourceState, transitionIterator.next());
              if (matchedState != null) {
                stateMatchInfo.addMatchedState(matchedState);
                if (matchedState.isFinal()) {
                  break;
                }
              }
            }
          } else {
            stateMatchInfo = new StateSingleMatchInfo(patternFA, matchedState, nextScopeState);
          }
        } else {
          // accept the first matched state, directly save it
          if (patternFA.mayTransitionOverlap() && transitionIterator.hasNext()) {
            stateMatchInfo =
                new StateMultiMatchInfo(
                    patternFA, matchedState, transitionIterator, nextScopeState);
            firstAncestorOfTraceback = ancestorStack.size();
          } else {
            stateMatchInfo = new StateSingleMatchInfo(patternFA, matchedState, nextScopeState);
          }
        }
        saveResult(child, stateMatchInfo);
        return;
      }
    }

    @Override
    public Iterator<N> getIterator() {
      return iterator;
    }

    @Override
    protected void close() {
      super.close();
      if (iterator != null) {
        releaseNodeIterator(iterator);
      }
    }
  }

  // there may be traceback when try to find the matched state of node;
  // the iterating
process will try to get the first matched state of a child. private class TraceBackChildrenIterator extends AbstractChildrenIterator { private final IStateMatchInfo sourceStateMatchInfo; private Iterator<N> iterator; TraceBackChildrenIterator(N parent, IStateMatchInfo stateMatchInfo) { super(parent, stateMatchInfo.getScopeMatchedState()); this.sourceStateMatchInfo = stateMatchInfo; } @Override protected void getNext() throws Exception { if (iterator == null) { this.iterator = initChildrenIterator(); } N child; IFAState sourceState; IStateMatchInfo stateMatchInfo; Iterator<IFATransition> transitionIterator; while (iterator.hasNext()) { child = iterator.next(); IFAState nextScopeState = allScope ? null : getNextMatchedScopeState(currentScopeState, child); stateMatchInfo = new StateMultiMatchInfo(patternFA, nextScopeState); if (mayTargetNodeType(child)) { for (int i = 0; i < sourceStateMatchInfo.getMatchedStateSize(); i++) { sourceState = sourceStateMatchInfo.getMatchedState(i); transitionIterator = tryGetNextMatchedState(child, sourceState, stateMatchInfo, true); if (stateMatchInfo.getMatchedStateSize() > 0) { stateMatchInfo.setSourceStateOrdinal(i); stateMatchInfo.setSourceTransitionIterator(transitionIterator); if (stateMatchInfo.hasFinalState()) { break; } } } if (stateMatchInfo.getMatchedStateSize() == 0 || !stateMatchInfo.hasFinalState()) { traceback(child, stateMatchInfo, sourceStateMatchInfo.getMatchedStateSize() - 1, true); if (stateMatchInfo.getMatchedStateSize() == 0) { releaseNode(child); continue; } } } else { for (int i = 0; i < sourceStateMatchInfo.getMatchedStateSize(); i++) { sourceState = sourceStateMatchInfo.getMatchedState(i); transitionIterator = tryGetNextMatchedState(child, sourceState, stateMatchInfo, false); if (stateMatchInfo.getMatchedStateSize() > 0) { stateMatchInfo.setSourceStateOrdinal(i); stateMatchInfo.setSourceTransitionIterator(transitionIterator); break; } } if (stateMatchInfo.getMatchedStateSize() == 0) { traceback(child, 
stateMatchInfo, sourceStateMatchInfo.getMatchedStateSize() - 1, false); if (stateMatchInfo.getMatchedStateSize() == 0) { releaseNode(child); continue; } } } saveResult(child, stateMatchInfo); return; } } @Override public Iterator<N> getIterator() { return iterator; } /** * Try to get next matched state from sourceState and add it into currentStateMatchInfo * * @param child child node to match * @param sourceState source state * @param currentStateMatchInfo currentStateMatchInfo * @return iterator of rest transitions */ private Iterator<IFATransition> tryGetNextMatchedState( N child, IFAState sourceState, IStateMatchInfo currentStateMatchInfo, boolean needFinalState) { Map<String, IFATransition> preciseMatchTransitionMap = patternFA.getPreciseMatchTransition(sourceState); IFAState matchedState; if (!preciseMatchTransitionMap.isEmpty()) { matchedState = tryGetNextState(child, sourceState, preciseMatchTransitionMap); if (matchedState != null) { currentStateMatchInfo.addMatchedState(matchedState); if (!needFinalState || matchedState.isFinal()) { return patternFA.getFuzzyMatchTransitionIterator(sourceState); } } } Iterator<IFATransition> transitionIterator = patternFA.getFuzzyMatchTransitionIterator(sourceState); while (transitionIterator.hasNext()) { matchedState = tryGetNextState(child, sourceState, transitionIterator.next()); if (matchedState != null) { currentStateMatchInfo.addMatchedState(matchedState); if (!needFinalState || matchedState.isFinal()) { return transitionIterator; } } } return transitionIterator; } private void traceback( N node, IStateMatchInfo stateMatchInfo, int checkedSourceStateOrdinal, boolean needFinalState) { IStateMatchInfo parentStateMatchInfo; N currentNode; IStateMatchInfo currentStateMatchInfo; int sourceStateOrdinal; IFAState sourceState = null; Iterator<IFATransition> transitionIterator = null; int matchedStateSize; IFAState matchedState; int currentNodeIndex; for (int i = ancestorStack.size() - 1; i >= firstAncestorOfTraceback; i--) { 
parentStateMatchInfo = ancestorStack.get(i - 1).stateMatchInfo; currentStateMatchInfo = ancestorStack.get(i).stateMatchInfo; // there's no state not further searched if (currentStateMatchInfo.getSourceStateOrdinal() == parentStateMatchInfo.getMatchedStateSize()) { continue; } // there's some state not further searched, process them in order currentNodeIndex = i; while (currentNodeIndex >= i) { parentStateMatchInfo = ancestorStack.get(currentNodeIndex - 1).stateMatchInfo; if (currentNodeIndex == ancestorStack.size()) { currentNode = node; currentStateMatchInfo = stateMatchInfo; } else { currentNode = ancestorStack.get(currentNodeIndex).node; currentStateMatchInfo = ancestorStack.get(currentNodeIndex).stateMatchInfo; } matchedState = null; if (currentNode == node) { sourceStateOrdinal = checkedSourceStateOrdinal; } else { sourceStateOrdinal = currentStateMatchInfo.getSourceStateOrdinal(); if (sourceStateOrdinal == parentStateMatchInfo.getMatchedStateSize()) { currentNodeIndex--; continue; } // there may be some states could be matched from transition of current source state sourceState = parentStateMatchInfo.getMatchedState(sourceStateOrdinal); transitionIterator = currentStateMatchInfo.getSourceTransitionIterator(); while (transitionIterator.hasNext()) { matchedState = tryGetNextState(currentNode, sourceState, transitionIterator.next()); if (matchedState != null) { break; } } } if (matchedState == null) { while (++sourceStateOrdinal < parentStateMatchInfo.getMatchedStateSize()) { sourceState = parentStateMatchInfo.getMatchedState(sourceStateOrdinal); matchedStateSize = currentStateMatchInfo.getMatchedStateSize(); transitionIterator = tryGetNextMatchedState( currentNode, sourceState, currentStateMatchInfo, needFinalState); // change of matchedStateSize means currentNode there is transition from sourceState // matching currentNode if (matchedStateSize != currentStateMatchInfo.getMatchedStateSize()) { matchedState = 
currentStateMatchInfo.getMatchedState(matchedStateSize); currentStateMatchInfo.setSourceStateOrdinal(sourceStateOrdinal); currentStateMatchInfo.setSourceTransitionIterator(transitionIterator); break; } } if (matchedState == null) { currentStateMatchInfo.setSourceStateOrdinal(sourceStateOrdinal - 1); currentStateMatchInfo.setSourceTransitionIterator(transitionIterator); currentNodeIndex--; continue; } } currentStateMatchInfo.addMatchedState(matchedState); if (currentNode == node) { if (needFinalState && !currentStateMatchInfo.hasFinalState()) { while (transitionIterator.hasNext()) { matchedState = tryGetNextState(currentNode, sourceState, transitionIterator.next()); if (matchedState != null) { currentStateMatchInfo.addMatchedState(matchedState); if (matchedState.isFinal()) { return; } } } currentNodeIndex--; } else { return; } } else { currentNodeIndex++; } } } } @Override protected void close() { super.close(); if (iterator != null) { releaseNodeIterator(iterator); } } } // the match process of FA graph is a dfs on FA Graph // a tmp way to process alias of measurement node, which may results in multi event when checking // the transition; // fortunately, the measurement node only match the final state, which means there won't be any // multi transition and traceback judge protected IFAState tryGetNextState( final N node, final IFAState sourceState, final Map<String, IFATransition> preciseMatchTransitionMap) { final IFATransition transition = preciseMatchTransitionMap.get(node.getName()); if (transition == null) { return null; } return patternFA.getNextState(sourceState, transition); } // a tmp way to process alias of measurement node, which may results in multi event when checking // the transition; // fortunately, the measurement node only match the final state, which means there won't be any // multi transition and traceback judge protected IFAState tryGetNextState( final N node, final IFAState sourceState, final IFATransition transition) { if 
(transition.isMatch(node.getName())) { return patternFA.getNextState(sourceState, transition); } else { return null; } } /** * May node can be accepted if it reaches final state. Its implementation should not depend on the * context. * * @param node node to be checked * @return {@code false} is if node must not be accepted. Otherwise, return {@code true}. */ protected abstract boolean mayTargetNodeType(final N node); }
googleapis/google-cloud-java
36,648
java-discoveryengine/proto-google-cloud-discoveryengine-v1alpha/src/main/java/com/google/cloud/discoveryengine/v1alpha/ListControlsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/discoveryengine/v1alpha/control_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.discoveryengine.v1alpha; /** * * * <pre> * Response for ListControls method. * </pre> * * Protobuf type {@code google.cloud.discoveryengine.v1alpha.ListControlsResponse} */ public final class ListControlsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1alpha.ListControlsResponse) ListControlsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListControlsResponse.newBuilder() to construct. 
private ListControlsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListControlsResponse() { controls_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListControlsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.discoveryengine.v1alpha.ControlServiceProto .internal_static_google_cloud_discoveryengine_v1alpha_ListControlsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.discoveryengine.v1alpha.ControlServiceProto .internal_static_google_cloud_discoveryengine_v1alpha_ListControlsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.discoveryengine.v1alpha.ListControlsResponse.class, com.google.cloud.discoveryengine.v1alpha.ListControlsResponse.Builder.class); } public static final int CONTROLS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.discoveryengine.v1alpha.Control> controls_; /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1alpha.Control controls = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.discoveryengine.v1alpha.Control> getControlsList() { return controls_; } /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1alpha.Control controls = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.discoveryengine.v1alpha.ControlOrBuilder> getControlsOrBuilderList() { return controls_; } /** * * * <pre> * All the Controls for a given data store. 
* </pre> * * <code>repeated .google.cloud.discoveryengine.v1alpha.Control controls = 1;</code> */ @java.lang.Override public int getControlsCount() { return controls_.size(); } /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1alpha.Control controls = 1;</code> */ @java.lang.Override public com.google.cloud.discoveryengine.v1alpha.Control getControls(int index) { return controls_.get(index); } /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1alpha.Control controls = 1;</code> */ @java.lang.Override public com.google.cloud.discoveryengine.v1alpha.ControlOrBuilder getControlsOrBuilder(int index) { return controls_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Pagination token, if not returned indicates the last page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * Pagination token, if not returned indicates the last page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < controls_.size(); i++) { output.writeMessage(1, controls_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < controls_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, controls_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.discoveryengine.v1alpha.ListControlsResponse)) { return super.equals(obj); } com.google.cloud.discoveryengine.v1alpha.ListControlsResponse other = (com.google.cloud.discoveryengine.v1alpha.ListControlsResponse) obj; if (!getControlsList().equals(other.getControlsList())) return false; if 
(!getNextPageToken().equals(other.getNextPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getControlsCount() > 0) { hash = (37 * hash) + CONTROLS_FIELD_NUMBER; hash = (53 * hash) + getControlsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.discoveryengine.v1alpha.ListControlsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1alpha.ListControlsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1alpha.ListControlsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1alpha.ListControlsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1alpha.ListControlsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1alpha.ListControlsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1alpha.ListControlsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1alpha.ListControlsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.discoveryengine.v1alpha.ListControlsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1alpha.ListControlsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.discoveryengine.v1alpha.ListControlsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1alpha.ListControlsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder 
newBuilder( com.google.cloud.discoveryengine.v1alpha.ListControlsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response for ListControls method. * </pre> * * Protobuf type {@code google.cloud.discoveryengine.v1alpha.ListControlsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1alpha.ListControlsResponse) com.google.cloud.discoveryengine.v1alpha.ListControlsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.discoveryengine.v1alpha.ControlServiceProto .internal_static_google_cloud_discoveryengine_v1alpha_ListControlsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.discoveryengine.v1alpha.ControlServiceProto .internal_static_google_cloud_discoveryengine_v1alpha_ListControlsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.discoveryengine.v1alpha.ListControlsResponse.class, com.google.cloud.discoveryengine.v1alpha.ListControlsResponse.Builder.class); } // Construct using com.google.cloud.discoveryengine.v1alpha.ListControlsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (controlsBuilder_ == null) { controls_ = java.util.Collections.emptyList(); } else { controls_ = null; 
controlsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.discoveryengine.v1alpha.ControlServiceProto .internal_static_google_cloud_discoveryengine_v1alpha_ListControlsResponse_descriptor; } @java.lang.Override public com.google.cloud.discoveryengine.v1alpha.ListControlsResponse getDefaultInstanceForType() { return com.google.cloud.discoveryengine.v1alpha.ListControlsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.discoveryengine.v1alpha.ListControlsResponse build() { com.google.cloud.discoveryengine.v1alpha.ListControlsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.discoveryengine.v1alpha.ListControlsResponse buildPartial() { com.google.cloud.discoveryengine.v1alpha.ListControlsResponse result = new com.google.cloud.discoveryengine.v1alpha.ListControlsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.discoveryengine.v1alpha.ListControlsResponse result) { if (controlsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { controls_ = java.util.Collections.unmodifiableList(controls_); bitField0_ = (bitField0_ & ~0x00000001); } result.controls_ = controls_; } else { result.controls_ = controlsBuilder_.build(); } } private void buildPartial0( com.google.cloud.discoveryengine.v1alpha.ListControlsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, 
java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.discoveryengine.v1alpha.ListControlsResponse) { return mergeFrom((com.google.cloud.discoveryengine.v1alpha.ListControlsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.discoveryengine.v1alpha.ListControlsResponse other) { if (other == com.google.cloud.discoveryengine.v1alpha.ListControlsResponse.getDefaultInstance()) return this; if (controlsBuilder_ == null) { if (!other.controls_.isEmpty()) { if (controls_.isEmpty()) { controls_ = other.controls_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureControlsIsMutable(); controls_.addAll(other.controls_); } onChanged(); } } else { if (!other.controls_.isEmpty()) { if (controlsBuilder_.isEmpty()) { controlsBuilder_.dispose(); controlsBuilder_ = null; controls_ = other.controls_; bitField0_ = (bitField0_ & ~0x00000001); controlsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getControlsFieldBuilder() : null; } else { controlsBuilder_.addAllMessages(other.controls_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.discoveryengine.v1alpha.Control m = input.readMessage( com.google.cloud.discoveryengine.v1alpha.Control.parser(), extensionRegistry); if (controlsBuilder_ == null) { ensureControlsIsMutable(); controls_.add(m); } else { controlsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.discoveryengine.v1alpha.Control> controls_ = java.util.Collections.emptyList(); private void ensureControlsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { controls_ = new java.util.ArrayList<com.google.cloud.discoveryengine.v1alpha.Control>(controls_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.discoveryengine.v1alpha.Control, com.google.cloud.discoveryengine.v1alpha.Control.Builder, 
com.google.cloud.discoveryengine.v1alpha.ControlOrBuilder> controlsBuilder_; /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1alpha.Control controls = 1;</code> */ public java.util.List<com.google.cloud.discoveryengine.v1alpha.Control> getControlsList() { if (controlsBuilder_ == null) { return java.util.Collections.unmodifiableList(controls_); } else { return controlsBuilder_.getMessageList(); } } /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1alpha.Control controls = 1;</code> */ public int getControlsCount() { if (controlsBuilder_ == null) { return controls_.size(); } else { return controlsBuilder_.getCount(); } } /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1alpha.Control controls = 1;</code> */ public com.google.cloud.discoveryengine.v1alpha.Control getControls(int index) { if (controlsBuilder_ == null) { return controls_.get(index); } else { return controlsBuilder_.getMessage(index); } } /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1alpha.Control controls = 1;</code> */ public Builder setControls(int index, com.google.cloud.discoveryengine.v1alpha.Control value) { if (controlsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureControlsIsMutable(); controls_.set(index, value); onChanged(); } else { controlsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * All the Controls for a given data store. 
* </pre> * * <code>repeated .google.cloud.discoveryengine.v1alpha.Control controls = 1;</code> */ public Builder setControls( int index, com.google.cloud.discoveryengine.v1alpha.Control.Builder builderForValue) { if (controlsBuilder_ == null) { ensureControlsIsMutable(); controls_.set(index, builderForValue.build()); onChanged(); } else { controlsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1alpha.Control controls = 1;</code> */ public Builder addControls(com.google.cloud.discoveryengine.v1alpha.Control value) { if (controlsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureControlsIsMutable(); controls_.add(value); onChanged(); } else { controlsBuilder_.addMessage(value); } return this; } /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1alpha.Control controls = 1;</code> */ public Builder addControls(int index, com.google.cloud.discoveryengine.v1alpha.Control value) { if (controlsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureControlsIsMutable(); controls_.add(index, value); onChanged(); } else { controlsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1alpha.Control controls = 1;</code> */ public Builder addControls( com.google.cloud.discoveryengine.v1alpha.Control.Builder builderForValue) { if (controlsBuilder_ == null) { ensureControlsIsMutable(); controls_.add(builderForValue.build()); onChanged(); } else { controlsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * All the Controls for a given data store. 
* </pre> * * <code>repeated .google.cloud.discoveryengine.v1alpha.Control controls = 1;</code> */ public Builder addControls( int index, com.google.cloud.discoveryengine.v1alpha.Control.Builder builderForValue) { if (controlsBuilder_ == null) { ensureControlsIsMutable(); controls_.add(index, builderForValue.build()); onChanged(); } else { controlsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1alpha.Control controls = 1;</code> */ public Builder addAllControls( java.lang.Iterable<? extends com.google.cloud.discoveryengine.v1alpha.Control> values) { if (controlsBuilder_ == null) { ensureControlsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, controls_); onChanged(); } else { controlsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1alpha.Control controls = 1;</code> */ public Builder clearControls() { if (controlsBuilder_ == null) { controls_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { controlsBuilder_.clear(); } return this; } /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1alpha.Control controls = 1;</code> */ public Builder removeControls(int index) { if (controlsBuilder_ == null) { ensureControlsIsMutable(); controls_.remove(index); onChanged(); } else { controlsBuilder_.remove(index); } return this; } /** * * * <pre> * All the Controls for a given data store. 
* </pre> * * <code>repeated .google.cloud.discoveryengine.v1alpha.Control controls = 1;</code> */ public com.google.cloud.discoveryengine.v1alpha.Control.Builder getControlsBuilder(int index) { return getControlsFieldBuilder().getBuilder(index); } /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1alpha.Control controls = 1;</code> */ public com.google.cloud.discoveryengine.v1alpha.ControlOrBuilder getControlsOrBuilder( int index) { if (controlsBuilder_ == null) { return controls_.get(index); } else { return controlsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1alpha.Control controls = 1;</code> */ public java.util.List<? extends com.google.cloud.discoveryengine.v1alpha.ControlOrBuilder> getControlsOrBuilderList() { if (controlsBuilder_ != null) { return controlsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(controls_); } } /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1alpha.Control controls = 1;</code> */ public com.google.cloud.discoveryengine.v1alpha.Control.Builder addControlsBuilder() { return getControlsFieldBuilder() .addBuilder(com.google.cloud.discoveryengine.v1alpha.Control.getDefaultInstance()); } /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1alpha.Control controls = 1;</code> */ public com.google.cloud.discoveryengine.v1alpha.Control.Builder addControlsBuilder(int index) { return getControlsFieldBuilder() .addBuilder(index, com.google.cloud.discoveryengine.v1alpha.Control.getDefaultInstance()); } /** * * * <pre> * All the Controls for a given data store. 
* </pre> * * <code>repeated .google.cloud.discoveryengine.v1alpha.Control controls = 1;</code> */ public java.util.List<com.google.cloud.discoveryengine.v1alpha.Control.Builder> getControlsBuilderList() { return getControlsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.discoveryengine.v1alpha.Control, com.google.cloud.discoveryengine.v1alpha.Control.Builder, com.google.cloud.discoveryengine.v1alpha.ControlOrBuilder> getControlsFieldBuilder() { if (controlsBuilder_ == null) { controlsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.discoveryengine.v1alpha.Control, com.google.cloud.discoveryengine.v1alpha.Control.Builder, com.google.cloud.discoveryengine.v1alpha.ControlOrBuilder>( controls_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); controls_ = null; } return controlsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Pagination token, if not returned indicates the last page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Pagination token, if not returned indicates the last page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Pagination token, if not returned indicates the last page. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Pagination token, if not returned indicates the last page. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Pagination token, if not returned indicates the last page. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1alpha.ListControlsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1alpha.ListControlsResponse) private static final com.google.cloud.discoveryengine.v1alpha.ListControlsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1alpha.ListControlsResponse(); } public static com.google.cloud.discoveryengine.v1alpha.ListControlsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListControlsResponse> PARSER = new com.google.protobuf.AbstractParser<ListControlsResponse>() { @java.lang.Override public ListControlsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return 
builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListControlsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListControlsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.discoveryengine.v1alpha.ListControlsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,721
java-certificate-manager/proto-google-cloud-certificate-manager-v1/src/main/java/com/google/cloud/certificatemanager/v1/UpdateCertificateMapRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/certificatemanager/v1/certificate_manager.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.certificatemanager.v1; /** * * * <pre> * Request for the `UpdateCertificateMap` method. * </pre> * * Protobuf type {@code google.cloud.certificatemanager.v1.UpdateCertificateMapRequest} */ public final class UpdateCertificateMapRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.certificatemanager.v1.UpdateCertificateMapRequest) UpdateCertificateMapRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateCertificateMapRequest.newBuilder() to construct. 
private UpdateCertificateMapRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateCertificateMapRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateCertificateMapRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.certificatemanager.v1.CertificateManagerProto .internal_static_google_cloud_certificatemanager_v1_UpdateCertificateMapRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.certificatemanager.v1.CertificateManagerProto .internal_static_google_cloud_certificatemanager_v1_UpdateCertificateMapRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest.class, com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest.Builder.class); } private int bitField0_; public static final int CERTIFICATE_MAP_FIELD_NUMBER = 1; private com.google.cloud.certificatemanager.v1.CertificateMap certificateMap_; /** * * * <pre> * Required. A definition of the certificate map to update. * </pre> * * <code> * .google.cloud.certificatemanager.v1.CertificateMap certificate_map = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the certificateMap field is set. */ @java.lang.Override public boolean hasCertificateMap() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. A definition of the certificate map to update. * </pre> * * <code> * .google.cloud.certificatemanager.v1.CertificateMap certificate_map = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The certificateMap. */ @java.lang.Override public com.google.cloud.certificatemanager.v1.CertificateMap getCertificateMap() { return certificateMap_ == null ? 
com.google.cloud.certificatemanager.v1.CertificateMap.getDefaultInstance() : certificateMap_; } /** * * * <pre> * Required. A definition of the certificate map to update. * </pre> * * <code> * .google.cloud.certificatemanager.v1.CertificateMap certificate_map = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.certificatemanager.v1.CertificateMapOrBuilder getCertificateMapOrBuilder() { return certificateMap_ == null ? com.google.cloud.certificatemanager.v1.CertificateMap.getDefaultInstance() : certificateMap_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Required. The update mask applies to the resource. For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The update mask applies to the resource. For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Required. The update mask applies to the resource. For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getCertificateMap()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getCertificateMap()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest)) { return super.equals(obj); } com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest other = (com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest) obj; if (hasCertificateMap() != other.hasCertificateMap()) return false; if (hasCertificateMap()) { if (!getCertificateMap().equals(other.getCertificateMap())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if 
(!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasCertificateMap()) { hash = (37 * hash) + CERTIFICATE_MAP_FIELD_NUMBER; hash = (53 * hash) + getCertificateMap().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest parseFrom( byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { 
return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request for the `UpdateCertificateMap` method. * </pre> * * Protobuf type {@code google.cloud.certificatemanager.v1.UpdateCertificateMapRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.certificatemanager.v1.UpdateCertificateMapRequest) com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.certificatemanager.v1.CertificateManagerProto .internal_static_google_cloud_certificatemanager_v1_UpdateCertificateMapRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.certificatemanager.v1.CertificateManagerProto .internal_static_google_cloud_certificatemanager_v1_UpdateCertificateMapRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest.class, com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest.Builder.class); } // Construct using // com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); 
maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getCertificateMapFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; certificateMap_ = null; if (certificateMapBuilder_ != null) { certificateMapBuilder_.dispose(); certificateMapBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.certificatemanager.v1.CertificateManagerProto .internal_static_google_cloud_certificatemanager_v1_UpdateCertificateMapRequest_descriptor; } @java.lang.Override public com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest getDefaultInstanceForType() { return com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest .getDefaultInstance(); } @java.lang.Override public com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest build() { com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest buildPartial() { com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest result = new com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.certificateMap_ = certificateMapBuilder_ == null ? 
certificateMap_ : certificateMapBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest) { return mergeFrom( (com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest other) { if (other == com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest .getDefaultInstance()) return this; if (other.hasCertificateMap()) { mergeCertificateMap(other.getCertificateMap()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean 
isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getCertificateMapFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.certificatemanager.v1.CertificateMap certificateMap_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.certificatemanager.v1.CertificateMap, com.google.cloud.certificatemanager.v1.CertificateMap.Builder, com.google.cloud.certificatemanager.v1.CertificateMapOrBuilder> certificateMapBuilder_; /** * * * <pre> * Required. A definition of the certificate map to update. * </pre> * * <code> * .google.cloud.certificatemanager.v1.CertificateMap certificate_map = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the certificateMap field is set. */ public boolean hasCertificateMap() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. A definition of the certificate map to update. * </pre> * * <code> * .google.cloud.certificatemanager.v1.CertificateMap certificate_map = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The certificateMap. 
*/ public com.google.cloud.certificatemanager.v1.CertificateMap getCertificateMap() { if (certificateMapBuilder_ == null) { return certificateMap_ == null ? com.google.cloud.certificatemanager.v1.CertificateMap.getDefaultInstance() : certificateMap_; } else { return certificateMapBuilder_.getMessage(); } } /** * * * <pre> * Required. A definition of the certificate map to update. * </pre> * * <code> * .google.cloud.certificatemanager.v1.CertificateMap certificate_map = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setCertificateMap(com.google.cloud.certificatemanager.v1.CertificateMap value) { if (certificateMapBuilder_ == null) { if (value == null) { throw new NullPointerException(); } certificateMap_ = value; } else { certificateMapBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. A definition of the certificate map to update. * </pre> * * <code> * .google.cloud.certificatemanager.v1.CertificateMap certificate_map = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setCertificateMap( com.google.cloud.certificatemanager.v1.CertificateMap.Builder builderForValue) { if (certificateMapBuilder_ == null) { certificateMap_ = builderForValue.build(); } else { certificateMapBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. A definition of the certificate map to update. 
* </pre> * * <code> * .google.cloud.certificatemanager.v1.CertificateMap certificate_map = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeCertificateMap( com.google.cloud.certificatemanager.v1.CertificateMap value) { if (certificateMapBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && certificateMap_ != null && certificateMap_ != com.google.cloud.certificatemanager.v1.CertificateMap.getDefaultInstance()) { getCertificateMapBuilder().mergeFrom(value); } else { certificateMap_ = value; } } else { certificateMapBuilder_.mergeFrom(value); } if (certificateMap_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. A definition of the certificate map to update. * </pre> * * <code> * .google.cloud.certificatemanager.v1.CertificateMap certificate_map = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearCertificateMap() { bitField0_ = (bitField0_ & ~0x00000001); certificateMap_ = null; if (certificateMapBuilder_ != null) { certificateMapBuilder_.dispose(); certificateMapBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. A definition of the certificate map to update. * </pre> * * <code> * .google.cloud.certificatemanager.v1.CertificateMap certificate_map = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.certificatemanager.v1.CertificateMap.Builder getCertificateMapBuilder() { bitField0_ |= 0x00000001; onChanged(); return getCertificateMapFieldBuilder().getBuilder(); } /** * * * <pre> * Required. A definition of the certificate map to update. 
* </pre> * * <code> * .google.cloud.certificatemanager.v1.CertificateMap certificate_map = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.certificatemanager.v1.CertificateMapOrBuilder getCertificateMapOrBuilder() { if (certificateMapBuilder_ != null) { return certificateMapBuilder_.getMessageOrBuilder(); } else { return certificateMap_ == null ? com.google.cloud.certificatemanager.v1.CertificateMap.getDefaultInstance() : certificateMap_; } } /** * * * <pre> * Required. A definition of the certificate map to update. * </pre> * * <code> * .google.cloud.certificatemanager.v1.CertificateMap certificate_map = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.certificatemanager.v1.CertificateMap, com.google.cloud.certificatemanager.v1.CertificateMap.Builder, com.google.cloud.certificatemanager.v1.CertificateMapOrBuilder> getCertificateMapFieldBuilder() { if (certificateMapBuilder_ == null) { certificateMapBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.certificatemanager.v1.CertificateMap, com.google.cloud.certificatemanager.v1.CertificateMap.Builder, com.google.cloud.certificatemanager.v1.CertificateMapOrBuilder>( getCertificateMap(), getParentForChildren(), isClean()); certificateMap_ = null; } return certificateMapBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Required. The update mask applies to the resource. For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. 
*/ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The update mask applies to the resource. For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Required. The update mask applies to the resource. For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The update mask applies to the resource. For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The update mask applies to the resource. 
For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. The update mask applies to the resource. For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The update mask applies to the resource. For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The update mask applies to the resource. For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Required. The update mask applies to the resource. For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.certificatemanager.v1.UpdateCertificateMapRequest) } // @@protoc_insertion_point(class_scope:google.cloud.certificatemanager.v1.UpdateCertificateMapRequest) private static final com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest(); } public static 
com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateCertificateMapRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateCertificateMapRequest>() { @java.lang.Override public UpdateCertificateMapRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateCertificateMapRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateCertificateMapRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.certificatemanager.v1.UpdateCertificateMapRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/sdk-platform-java
36,704
java-iam/proto-google-iam-v3/src/main/java/com/google/iam/v3/ListPolicyBindingsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/iam/v3/policy_bindings_service.proto // Protobuf Java Version: 3.25.8 package com.google.iam.v3; /** * * * <pre> * Response message for ListPolicyBindings method. * </pre> * * Protobuf type {@code google.iam.v3.ListPolicyBindingsResponse} */ public final class ListPolicyBindingsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.iam.v3.ListPolicyBindingsResponse) ListPolicyBindingsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListPolicyBindingsResponse.newBuilder() to construct. 
private ListPolicyBindingsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListPolicyBindingsResponse() { policyBindings_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListPolicyBindingsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.iam.v3.PolicyBindingsServiceProto .internal_static_google_iam_v3_ListPolicyBindingsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.iam.v3.PolicyBindingsServiceProto .internal_static_google_iam_v3_ListPolicyBindingsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.iam.v3.ListPolicyBindingsResponse.class, com.google.iam.v3.ListPolicyBindingsResponse.Builder.class); } public static final int POLICY_BINDINGS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.iam.v3.PolicyBinding> policyBindings_; /** * * * <pre> * The policy bindings from the specified parent. * </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ @java.lang.Override public java.util.List<com.google.iam.v3.PolicyBinding> getPolicyBindingsList() { return policyBindings_; } /** * * * <pre> * The policy bindings from the specified parent. * </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.iam.v3.PolicyBindingOrBuilder> getPolicyBindingsOrBuilderList() { return policyBindings_; } /** * * * <pre> * The policy bindings from the specified parent. 
* </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ @java.lang.Override public int getPolicyBindingsCount() { return policyBindings_.size(); } /** * * * <pre> * The policy bindings from the specified parent. * </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ @java.lang.Override public com.google.iam.v3.PolicyBinding getPolicyBindings(int index) { return policyBindings_.get(index); } /** * * * <pre> * The policy bindings from the specified parent. * </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ @java.lang.Override public com.google.iam.v3.PolicyBindingOrBuilder getPolicyBindingsOrBuilder(int index) { return policyBindings_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Optional. A token, which can be sent as `page_token` to retrieve the next * page. If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * Optional. A token, which can be sent as `page_token` to retrieve the next * page. If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < policyBindings_.size(); i++) { output.writeMessage(1, policyBindings_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < policyBindings_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, policyBindings_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.iam.v3.ListPolicyBindingsResponse)) { return super.equals(obj); } com.google.iam.v3.ListPolicyBindingsResponse other = (com.google.iam.v3.ListPolicyBindingsResponse) obj; if (!getPolicyBindingsList().equals(other.getPolicyBindingsList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return 
false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getPolicyBindingsCount() > 0) { hash = (37 * hash) + POLICY_BINDINGS_FIELD_NUMBER; hash = (53 * hash) + getPolicyBindingsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.iam.v3.ListPolicyBindingsResponse parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.iam.v3.ListPolicyBindingsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.iam.v3.ListPolicyBindingsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.iam.v3.ListPolicyBindingsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.iam.v3.ListPolicyBindingsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.iam.v3.ListPolicyBindingsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.iam.v3.ListPolicyBindingsResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.iam.v3.ListPolicyBindingsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.iam.v3.ListPolicyBindingsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.iam.v3.ListPolicyBindingsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.iam.v3.ListPolicyBindingsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.iam.v3.ListPolicyBindingsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.iam.v3.ListPolicyBindingsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for ListPolicyBindings method. * </pre> * * Protobuf type {@code google.iam.v3.ListPolicyBindingsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.iam.v3.ListPolicyBindingsResponse) com.google.iam.v3.ListPolicyBindingsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.iam.v3.PolicyBindingsServiceProto .internal_static_google_iam_v3_ListPolicyBindingsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.iam.v3.PolicyBindingsServiceProto .internal_static_google_iam_v3_ListPolicyBindingsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.iam.v3.ListPolicyBindingsResponse.class, com.google.iam.v3.ListPolicyBindingsResponse.Builder.class); } // Construct using com.google.iam.v3.ListPolicyBindingsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (policyBindingsBuilder_ == null) { policyBindings_ = java.util.Collections.emptyList(); } else { policyBindings_ = null; policyBindingsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.iam.v3.PolicyBindingsServiceProto .internal_static_google_iam_v3_ListPolicyBindingsResponse_descriptor; } @java.lang.Override public 
com.google.iam.v3.ListPolicyBindingsResponse getDefaultInstanceForType() { return com.google.iam.v3.ListPolicyBindingsResponse.getDefaultInstance(); } @java.lang.Override public com.google.iam.v3.ListPolicyBindingsResponse build() { com.google.iam.v3.ListPolicyBindingsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.iam.v3.ListPolicyBindingsResponse buildPartial() { com.google.iam.v3.ListPolicyBindingsResponse result = new com.google.iam.v3.ListPolicyBindingsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(com.google.iam.v3.ListPolicyBindingsResponse result) { if (policyBindingsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { policyBindings_ = java.util.Collections.unmodifiableList(policyBindings_); bitField0_ = (bitField0_ & ~0x00000001); } result.policyBindings_ = policyBindings_; } else { result.policyBindings_ = policyBindingsBuilder_.build(); } } private void buildPartial0(com.google.iam.v3.ListPolicyBindingsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return 
super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.iam.v3.ListPolicyBindingsResponse) { return mergeFrom((com.google.iam.v3.ListPolicyBindingsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.iam.v3.ListPolicyBindingsResponse other) { if (other == com.google.iam.v3.ListPolicyBindingsResponse.getDefaultInstance()) return this; if (policyBindingsBuilder_ == null) { if (!other.policyBindings_.isEmpty()) { if (policyBindings_.isEmpty()) { policyBindings_ = other.policyBindings_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensurePolicyBindingsIsMutable(); policyBindings_.addAll(other.policyBindings_); } onChanged(); } } else { if (!other.policyBindings_.isEmpty()) { if (policyBindingsBuilder_.isEmpty()) { policyBindingsBuilder_.dispose(); policyBindingsBuilder_ = null; policyBindings_ = other.policyBindings_; bitField0_ = (bitField0_ & ~0x00000001); policyBindingsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getPolicyBindingsFieldBuilder() : null; } else { policyBindingsBuilder_.addAllMessages(other.policyBindings_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.iam.v3.PolicyBinding m = input.readMessage(com.google.iam.v3.PolicyBinding.parser(), extensionRegistry); if (policyBindingsBuilder_ == null) { ensurePolicyBindingsIsMutable(); policyBindings_.add(m); } else { policyBindingsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.iam.v3.PolicyBinding> policyBindings_ = java.util.Collections.emptyList(); private void ensurePolicyBindingsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { policyBindings_ = new java.util.ArrayList<com.google.iam.v3.PolicyBinding>(policyBindings_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.iam.v3.PolicyBinding, com.google.iam.v3.PolicyBinding.Builder, com.google.iam.v3.PolicyBindingOrBuilder> 
policyBindingsBuilder_; /** * * * <pre> * The policy bindings from the specified parent. * </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ public java.util.List<com.google.iam.v3.PolicyBinding> getPolicyBindingsList() { if (policyBindingsBuilder_ == null) { return java.util.Collections.unmodifiableList(policyBindings_); } else { return policyBindingsBuilder_.getMessageList(); } } /** * * * <pre> * The policy bindings from the specified parent. * </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ public int getPolicyBindingsCount() { if (policyBindingsBuilder_ == null) { return policyBindings_.size(); } else { return policyBindingsBuilder_.getCount(); } } /** * * * <pre> * The policy bindings from the specified parent. * </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ public com.google.iam.v3.PolicyBinding getPolicyBindings(int index) { if (policyBindingsBuilder_ == null) { return policyBindings_.get(index); } else { return policyBindingsBuilder_.getMessage(index); } } /** * * * <pre> * The policy bindings from the specified parent. * </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ public Builder setPolicyBindings(int index, com.google.iam.v3.PolicyBinding value) { if (policyBindingsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePolicyBindingsIsMutable(); policyBindings_.set(index, value); onChanged(); } else { policyBindingsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The policy bindings from the specified parent. 
* </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ public Builder setPolicyBindings( int index, com.google.iam.v3.PolicyBinding.Builder builderForValue) { if (policyBindingsBuilder_ == null) { ensurePolicyBindingsIsMutable(); policyBindings_.set(index, builderForValue.build()); onChanged(); } else { policyBindingsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The policy bindings from the specified parent. * </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ public Builder addPolicyBindings(com.google.iam.v3.PolicyBinding value) { if (policyBindingsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePolicyBindingsIsMutable(); policyBindings_.add(value); onChanged(); } else { policyBindingsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The policy bindings from the specified parent. * </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ public Builder addPolicyBindings(int index, com.google.iam.v3.PolicyBinding value) { if (policyBindingsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePolicyBindingsIsMutable(); policyBindings_.add(index, value); onChanged(); } else { policyBindingsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The policy bindings from the specified parent. * </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ public Builder addPolicyBindings(com.google.iam.v3.PolicyBinding.Builder builderForValue) { if (policyBindingsBuilder_ == null) { ensurePolicyBindingsIsMutable(); policyBindings_.add(builderForValue.build()); onChanged(); } else { policyBindingsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The policy bindings from the specified parent. 
 * </pre>
 *
 * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code>
 */
// NOTE(review): everything below is protoc-generated builder/parser boilerplate for
// ListPolicyBindingsResponse (the file header says "DO NOT EDIT"). Kept token-identical
// on purpose — to change behavior, edit the .proto and regenerate instead of hand-editing.
public Builder addPolicyBindings(
    int index, com.google.iam.v3.PolicyBinding.Builder builderForValue) {
  if (policyBindingsBuilder_ == null) {
    ensurePolicyBindingsIsMutable();
    policyBindings_.add(index, builderForValue.build());
    onChanged();
  } else {
    policyBindingsBuilder_.addMessage(index, builderForValue.build());
  }
  return this;
}

/**
 *
 *
 * <pre>
 * The policy bindings from the specified parent.
 * </pre>
 *
 * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code>
 */
public Builder addAllPolicyBindings(
    java.lang.Iterable<? extends com.google.iam.v3.PolicyBinding> values) {
  if (policyBindingsBuilder_ == null) {
    ensurePolicyBindingsIsMutable();
    com.google.protobuf.AbstractMessageLite.Builder.addAll(values, policyBindings_);
    onChanged();
  } else {
    policyBindingsBuilder_.addAllMessages(values);
  }
  return this;
}

/**
 *
 *
 * <pre>
 * The policy bindings from the specified parent.
 * </pre>
 *
 * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code>
 */
public Builder clearPolicyBindings() {
  if (policyBindingsBuilder_ == null) {
    policyBindings_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000001);
    onChanged();
  } else {
    policyBindingsBuilder_.clear();
  }
  return this;
}

/**
 *
 *
 * <pre>
 * The policy bindings from the specified parent.
 * </pre>
 *
 * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code>
 */
public Builder removePolicyBindings(int index) {
  if (policyBindingsBuilder_ == null) {
    ensurePolicyBindingsIsMutable();
    policyBindings_.remove(index);
    onChanged();
  } else {
    policyBindingsBuilder_.remove(index);
  }
  return this;
}

/**
 *
 *
 * <pre>
 * The policy bindings from the specified parent.
 * </pre>
 *
 * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code>
 */
public com.google.iam.v3.PolicyBinding.Builder getPolicyBindingsBuilder(int index) {
  return getPolicyBindingsFieldBuilder().getBuilder(index);
}

/**
 *
 *
 * <pre>
 * The policy bindings from the specified parent.
 * </pre>
 *
 * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code>
 */
public com.google.iam.v3.PolicyBindingOrBuilder getPolicyBindingsOrBuilder(int index) {
  if (policyBindingsBuilder_ == null) {
    return policyBindings_.get(index);
  } else {
    return policyBindingsBuilder_.getMessageOrBuilder(index);
  }
}

/**
 *
 *
 * <pre>
 * The policy bindings from the specified parent.
 * </pre>
 *
 * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code>
 */
public java.util.List<? extends com.google.iam.v3.PolicyBindingOrBuilder>
    getPolicyBindingsOrBuilderList() {
  if (policyBindingsBuilder_ != null) {
    return policyBindingsBuilder_.getMessageOrBuilderList();
  } else {
    return java.util.Collections.unmodifiableList(policyBindings_);
  }
}

/**
 *
 *
 * <pre>
 * The policy bindings from the specified parent.
 * </pre>
 *
 * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code>
 */
public com.google.iam.v3.PolicyBinding.Builder addPolicyBindingsBuilder() {
  return getPolicyBindingsFieldBuilder()
      .addBuilder(com.google.iam.v3.PolicyBinding.getDefaultInstance());
}

/**
 *
 *
 * <pre>
 * The policy bindings from the specified parent.
 * </pre>
 *
 * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code>
 */
public com.google.iam.v3.PolicyBinding.Builder addPolicyBindingsBuilder(int index) {
  return getPolicyBindingsFieldBuilder()
      .addBuilder(index, com.google.iam.v3.PolicyBinding.getDefaultInstance());
}

/**
 *
 *
 * <pre>
 * The policy bindings from the specified parent.
 * </pre>
 *
 * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code>
 */
public java.util.List<com.google.iam.v3.PolicyBinding.Builder> getPolicyBindingsBuilderList() {
  return getPolicyBindingsFieldBuilder().getBuilderList();
}

private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.iam.v3.PolicyBinding,
        com.google.iam.v3.PolicyBinding.Builder,
        com.google.iam.v3.PolicyBindingOrBuilder>
    getPolicyBindingsFieldBuilder() {
  if (policyBindingsBuilder_ == null) {
    policyBindingsBuilder_ =
        new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.iam.v3.PolicyBinding,
            com.google.iam.v3.PolicyBinding.Builder,
            com.google.iam.v3.PolicyBindingOrBuilder>(
            policyBindings_,
            ((bitField0_ & 0x00000001) != 0),
            getParentForChildren(),
            isClean());
    policyBindings_ = null;
  }
  return policyBindingsBuilder_;
}

private java.lang.Object nextPageToken_ = "";

/**
 *
 *
 * <pre>
 * Optional. A token, which can be sent as `page_token` to retrieve the next
 * page. If this field is omitted, there are no subsequent pages.
 * </pre>
 *
 * <code>string next_page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The nextPageToken.
 */
public java.lang.String getNextPageToken() {
  java.lang.Object ref = nextPageToken_;
  if (!(ref instanceof java.lang.String)) {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    nextPageToken_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}

/**
 *
 *
 * <pre>
 * Optional. A token, which can be sent as `page_token` to retrieve the next
 * page. If this field is omitted, there are no subsequent pages.
 * </pre>
 *
 * <code>string next_page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The bytes for nextPageToken.
 */
public com.google.protobuf.ByteString getNextPageTokenBytes() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    nextPageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

/**
 *
 *
 * <pre>
 * Optional. A token, which can be sent as `page_token` to retrieve the next
 * page. If this field is omitted, there are no subsequent pages.
 * </pre>
 *
 * <code>string next_page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @param value The nextPageToken to set.
 * @return This builder for chaining.
 */
public Builder setNextPageToken(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  nextPageToken_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}

/**
 *
 *
 * <pre>
 * Optional. A token, which can be sent as `page_token` to retrieve the next
 * page. If this field is omitted, there are no subsequent pages.
 * </pre>
 *
 * <code>string next_page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return This builder for chaining.
 */
public Builder clearNextPageToken() {
  nextPageToken_ = getDefaultInstance().getNextPageToken();
  bitField0_ = (bitField0_ & ~0x00000002);
  onChanged();
  return this;
}

/**
 *
 *
 * <pre>
 * Optional. A token, which can be sent as `page_token` to retrieve the next
 * page. If this field is omitted, there are no subsequent pages.
 * </pre>
 *
 * <code>string next_page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @param value The bytes for nextPageToken to set.
 * @return This builder for chaining.
 */
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  nextPageToken_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}

@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}

// @@protoc_insertion_point(builder_scope:google.iam.v3.ListPolicyBindingsResponse)
}

// @@protoc_insertion_point(class_scope:google.iam.v3.ListPolicyBindingsResponse)
private static final com.google.iam.v3.ListPolicyBindingsResponse DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.iam.v3.ListPolicyBindingsResponse();
}

public static com.google.iam.v3.ListPolicyBindingsResponse getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

private static final com.google.protobuf.Parser<ListPolicyBindingsResponse> PARSER =
    new com.google.protobuf.AbstractParser<ListPolicyBindingsResponse>() {
      @java.lang.Override
      public ListPolicyBindingsResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<ListPolicyBindingsResponse> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<ListPolicyBindingsResponse> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.iam.v3.ListPolicyBindingsResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
apache/geode
36,765
geode-core/src/distributedTest/java/org/apache/geode/cache/query/internal/index/ConcurrentIndexUpdateWithoutWLDUnitTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.cache.query.internal.index; import static org.apache.geode.cache.Region.SEPARATOR; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.util.ArrayList; import java.util.Collection; import org.junit.Test; import org.junit.experimental.categories.Category; import org.apache.geode.cache.Cache; import org.apache.geode.cache.CacheException; import org.apache.geode.cache.CacheExistsException; import org.apache.geode.cache.CacheFactory; import org.apache.geode.cache.Region; import org.apache.geode.cache.query.Index; import org.apache.geode.cache.query.IndexStatistics; import org.apache.geode.cache.query.data.Portfolio; import org.apache.geode.cache.query.data.Position; import org.apache.geode.cache.query.internal.index.AbstractIndex.RegionEntryToValuesMap; import org.apache.geode.cache.query.internal.index.IndexStore.IndexStoreEntry; import org.apache.geode.cache.query.internal.index.MemoryIndexStore.MemoryIndexStoreEntry; import org.apache.geode.cache.query.partitioned.PRQueryDUnitHelper; import org.apache.geode.cache30.CacheSerializableRunnable; import org.apache.geode.cache30.CacheTestCase; import 
org.apache.geode.internal.cache.CachedDeserializable;
import org.apache.geode.internal.cache.GemFireCacheImpl;
import org.apache.geode.internal.cache.LocalRegion;
import org.apache.geode.internal.cache.PartitionedRegion;
import org.apache.geode.internal.cache.RegionEntry;
import org.apache.geode.internal.cache.Token;
import org.apache.geode.internal.cache.persistence.query.CloseableIterator;
import org.apache.geode.test.dunit.Assert;
import org.apache.geode.test.dunit.AsyncInvocation;
import org.apache.geode.test.dunit.Host;
import org.apache.geode.test.dunit.Invoke;
import org.apache.geode.test.dunit.LogWriterUtils;
import org.apache.geode.test.dunit.SerializableRunnableIF;
import org.apache.geode.test.dunit.ThreadUtils;
import org.apache.geode.test.dunit.VM;
import org.apache.geode.test.dunit.internal.JUnit4DistributedTestCase;
import org.apache.geode.test.junit.categories.OQLIndexTest;
import org.apache.geode.util.internal.GeodeGlossary;

/**
 * During validation all region operations are paused for a while. Validation happens multiple time
 * during one test run on a fixed time interval.
 */
@Category({OQLIndexTest.class})
public class ConcurrentIndexUpdateWithoutWLDUnitTest extends JUnit4DistributedTestCase {

  PRQueryDUnitHelper helper = new PRQueryDUnitHelper();

  // Region under test; shared by every scenario in this class.
  private static final String regionName = "Portfolios";

  private final int redundancy = 1;

  // CompactRangeIndex
  private final String indexName = "idIndex";

  private final String indexedExpression = "ID";

  private final String fromClause = SEPARATOR + regionName;

  private final String alias = "p";

  // RangeIndex over the nested positions collection of each Portfolio.
  private final String rindexName = "secidIndex";

  private final String rindexedExpression = "pos.secId";

  private final String rfromClause = SEPARATOR + regionName + " p, p.positions.values pos";

  private final String ralias = "pos";

  // Size of the key sub-range handed to each VM in the PR tests.
  int stepSize = 10;

  private final int totalDataSize = 50;

  /**
   * Ensures each of the given VMs has a cache (creating one if needed) before a test runs.
   */
  public void setCacheInVMs(VM... vms) {
    for (VM vm : vms) {
      vm.invoke(this::getAvailableCacheElseCreateCache);
    }
  }

  /**
   * Reuses the already-running cache in this VM, or creates a fresh one and registers it with
   * {@link PRQueryDUnitHelper}.
   */
  private final void getAvailableCacheElseCreateCache() {
    // NOTE(review): this synchronizes on a *different* test class
    // (ConcurrentIndexUpdateWithInplaceObjectModFalseDUnitTest.class) — presumably a shared
    // lock across the copy-pasted sibling tests, but it looks like a copy/paste slip; confirm
    // before changing.
    synchronized (ConcurrentIndexUpdateWithInplaceObjectModFalseDUnitTest.class) {
      try {
        Cache newCache = GemFireCacheImpl.getInstance();
        if (null == newCache) {
          // Keep the DS alive when the cache closes so other tests in this JVM can reuse it.
          System.setProperty(
              GeodeGlossary.GEMFIRE_PREFIX + "DISABLE_DISCONNECT_DS_ON_CACHE_CLOSE", "true");
          newCache = CacheFactory.create(getSystem());
        }
        PRQueryDUnitHelper.setCache(newCache);
      } catch (CacheExistsException e) {
        Assert.fail("the cache already exists", e); // TODO: remove error handling
      } catch (RuntimeException ex) {
        throw ex;
      } catch (Exception ex) {
        Assert.fail("Checked exception while initializing cache??", ex);
      } finally {
        System.clearProperty(
            GeodeGlossary.GEMFIRE_PREFIX + "DISABLE_DISCONNECT_DS_ON_CACHE_CLOSE");
      }
    }
  }

  /**
   * Tear down a PartitionedRegionTestCase by cleaning up the existing cache (mainly because we want
   * to destroy any existing PartitionedRegions)
   */
  @Override
  public final void preTearDown() throws Exception {
    Invoke.invokeInEveryVM(() -> ConcurrentIndexUpdateWithoutWLDUnitTest.destroyRegions());
    Invoke.invokeInEveryVM(CacheTestCase::closeCache);
  }

  /** Destroys the test region in this VM, if the cache and region still exist. */
  public static synchronized void destroyRegions() {
    Cache cache = GemFireCacheImpl.getInstance();
    if (cache != null) {
      Region region = cache.getRegion(regionName);
      if (region != null) {
        region.destroyRegion();
      }
    }
  }

  // Tests on Local/Replicated Region

  @Test
  public void testCompactRangeIndex() {
    // Create a Local Region.
Host host = Host.getHost(0); VM vm0 = host.getVM(0); setCacheInVMs(vm0); vm0.invoke(helper.getCacheSerializableRunnableForReplicatedRegionCreation(regionName)); vm0.invoke(helper.getCacheSerializableRunnableForPRIndexCreate(regionName, indexName, indexedExpression, fromClause, alias)); AsyncInvocation<?>[] asyncInvs = new AsyncInvocation[2]; asyncInvs[0] = vm0.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, 0, stepSize)); asyncInvs[1] = vm0.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, 0, stepSize)); for (AsyncInvocation<?> inv : asyncInvs) { ThreadUtils.join(inv, 30 * 000); } for (AsyncInvocation<?> inv : asyncInvs) { if (inv.exceptionOccurred()) { Assert.fail("Random region operation failed on VM_" + inv.getId(), inv.getException()); } } vm0.invoke(getCacheSerializableRunnableForIndexValidation(regionName, indexName)); } @Test public void testMultiIndexCreation() { // Create a Local Region. Host host = Host.getHost(0); VM vm0 = host.getVM(1); setCacheInVMs(vm0); vm0.invoke(helper.getCacheSerializableRunnableForReplicatedRegionCreation(regionName)); ArrayList<String> names = new ArrayList<>(); names.add(indexName); names.add(rindexName); ArrayList<String> exps = new ArrayList<>(); exps.add(indexedExpression); exps.add(rindexedExpression); ArrayList<String> fromClauses = new ArrayList<>(); fromClauses.add(fromClause); fromClauses.add(rfromClause); vm0.invoke( helper.getCacheSerializableRunnableForDefineIndex(regionName, names, exps, fromClauses)); AsyncInvocation<?>[] asyncInvs = new AsyncInvocation[2]; asyncInvs[0] = vm0.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, 0, stepSize)); asyncInvs[1] = vm0.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, 0, stepSize)); for (AsyncInvocation<?> inv : asyncInvs) { ThreadUtils.join(inv, 30 * 000); } for (AsyncInvocation<?> inv : asyncInvs) { if (inv.exceptionOccurred()) { Assert.fail("Random region operation failed on 
VM_" + inv.getId(), inv.getException()); } } vm0.invoke(getCacheSerializableRunnableForIndexValidation(regionName, indexName)); } private SerializableRunnableIF getCacheSerializableRunnableForIndexValidation( final String regionName, final String indexName) { return new CacheSerializableRunnable("Index Validate") { @Override public void run2() throws CacheException { Cache cache = PRQueryDUnitHelper.getCache(); Region region = cache.getRegion(regionName); IndexValidator validator = new IndexValidator(); validator.validate(region); } }; } @Test public void testRangeIndex() { Host host = Host.getHost(0); VM vm0 = host.getVM(0); setCacheInVMs(vm0); vm0.invoke(helper.getCacheSerializableRunnableForReplicatedRegionCreation(regionName)); vm0.invoke(helper.getCacheSerializableRunnableForPRIndexCreate(regionName, rindexName, rindexedExpression, rfromClause, ralias)); AsyncInvocation<?>[] asyncInvs = new AsyncInvocation[2]; asyncInvs[0] = vm0.invokeAsync( helper.getCacheSerializableRunnableForPRRandomOps(regionName, 0, totalDataSize)); asyncInvs[1] = vm0.invokeAsync( helper.getCacheSerializableRunnableForPRRandomOps(regionName, 0, totalDataSize)); for (AsyncInvocation<?> inv : asyncInvs) { ThreadUtils.join(inv, 30 * 000); } for (AsyncInvocation<?> inv : asyncInvs) { if (inv.exceptionOccurred()) { Assert.fail("Random region operation failed on VM_" + inv.getId(), inv.getException()); } } vm0.invoke(getCacheSerializableRunnableForIndexValidation(regionName, rindexName)); } // Tests on Partition Region @Test public void testCompactRangeIndexOnPR() { Host host = Host.getHost(0); VM vm0 = host.getVM(0); VM vm1 = host.getVM(1); VM vm2 = host.getVM(2); VM vm3 = host.getVM(3); setCacheInVMs(vm0, vm1, vm2, vm3); vm0.invoke(helper.getCacheSerializableRunnableForPRAccessorCreate(regionName, redundancy, Portfolio.class)); vm1.invoke( helper.getCacheSerializableRunnableForPRCreate(regionName, redundancy, Portfolio.class)); vm2.invoke( 
helper.getCacheSerializableRunnableForPRCreate(regionName, redundancy, Portfolio.class)); vm3.invoke( helper.getCacheSerializableRunnableForPRCreate(regionName, redundancy, Portfolio.class)); vm0.invoke(helper.getCacheSerializableRunnableForPRIndexCreate(regionName, indexName, indexedExpression, fromClause, alias)); AsyncInvocation<?>[] asyncInvs = new AsyncInvocation[12]; asyncInvs[0] = vm0.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, 0, stepSize)); asyncInvs[1] = vm1.invokeAsync( helper.getCacheSerializableRunnableForPRRandomOps(regionName, stepSize, (2 * stepSize))); asyncInvs[2] = vm2.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, (2 * stepSize), (3 * stepSize))); asyncInvs[3] = vm3.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, (3 * (stepSize)), totalDataSize)); asyncInvs[4] = vm0.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, 0, stepSize)); asyncInvs[5] = vm1.invokeAsync( helper.getCacheSerializableRunnableForPRRandomOps(regionName, stepSize, (2 * stepSize))); asyncInvs[6] = vm2.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, (2 * stepSize), (3 * stepSize))); asyncInvs[7] = vm3.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, (3 * (stepSize)), totalDataSize)); asyncInvs[8] = vm0.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, 0, stepSize)); asyncInvs[9] = vm1.invokeAsync( helper.getCacheSerializableRunnableForPRRandomOps(regionName, stepSize, (2 * stepSize))); asyncInvs[10] = vm2.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, (2 * stepSize), (3 * stepSize))); asyncInvs[11] = vm3.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, (3 * (stepSize)), totalDataSize)); for (AsyncInvocation<?> inv : asyncInvs) { ThreadUtils.join(inv, 60 * 000); } for (AsyncInvocation<?> inv : asyncInvs) { if (inv.exceptionOccurred()) { 
Assert.fail("Random region operation failed on VM_" + inv.getId(), inv.getException()); } } vm0.invoke(getCacheSerializableRunnableForIndexValidation(regionName, indexName)); vm1.invoke(getCacheSerializableRunnableForIndexValidation(regionName, indexName)); vm2.invoke(getCacheSerializableRunnableForIndexValidation(regionName, indexName)); vm3.invoke(getCacheSerializableRunnableForIndexValidation(regionName, indexName)); } @Test public void testRangeIndexOnPR() { Host host = Host.getHost(0); VM vm0 = host.getVM(0); VM vm1 = host.getVM(1); VM vm2 = host.getVM(2); VM vm3 = host.getVM(3); setCacheInVMs(vm0, vm1, vm2, vm3); vm0.invoke(helper.getCacheSerializableRunnableForPRAccessorCreate(regionName, redundancy, Portfolio.class)); vm1.invoke( helper.getCacheSerializableRunnableForPRCreate(regionName, redundancy, Portfolio.class)); vm2.invoke( helper.getCacheSerializableRunnableForPRCreate(regionName, redundancy, Portfolio.class)); vm3.invoke( helper.getCacheSerializableRunnableForPRCreate(regionName, redundancy, Portfolio.class)); vm0.invoke(helper.getCacheSerializableRunnableForPRIndexCreate(regionName, rindexName, rindexedExpression, rfromClause, ralias)); AsyncInvocation<?>[] asyncInvs = new AsyncInvocation[12]; asyncInvs[0] = vm0.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, 0, stepSize)); asyncInvs[1] = vm1.invokeAsync( helper.getCacheSerializableRunnableForPRRandomOps(regionName, stepSize, (2 * stepSize))); asyncInvs[2] = vm2.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, (2 * stepSize), (3 * stepSize))); asyncInvs[3] = vm3.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, (3 * (stepSize)), totalDataSize)); asyncInvs[4] = vm0.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, 0, stepSize)); asyncInvs[5] = vm1.invokeAsync( helper.getCacheSerializableRunnableForPRRandomOps(regionName, stepSize, (2 * stepSize))); asyncInvs[6] = 
vm2.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, (2 * stepSize), (3 * stepSize))); asyncInvs[7] = vm3.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, (3 * (stepSize)), totalDataSize)); asyncInvs[8] = vm0.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, 0, stepSize)); asyncInvs[9] = vm1.invokeAsync( helper.getCacheSerializableRunnableForPRRandomOps(regionName, stepSize, (2 * stepSize))); asyncInvs[10] = vm2.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, (2 * stepSize), (3 * stepSize))); asyncInvs[11] = vm3.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, (3 * (stepSize)), totalDataSize)); for (AsyncInvocation<?> inv : asyncInvs) { ThreadUtils.join(inv, 60 * 000); } for (AsyncInvocation<?> inv : asyncInvs) { if (inv.exceptionOccurred()) { Assert.fail("Random region operation failed on VM_" + inv.getId(), inv.getException()); } } vm0.invoke(getCacheSerializableRunnableForIndexValidation(regionName, rindexName)); vm1.invoke(getCacheSerializableRunnableForIndexValidation(regionName, rindexName)); vm2.invoke(getCacheSerializableRunnableForIndexValidation(regionName, rindexName)); vm3.invoke(getCacheSerializableRunnableForIndexValidation(regionName, rindexName)); } @Test public void testMultiIndexOnPR() { Host host = Host.getHost(0); VM vm0 = host.getVM(0); VM vm1 = host.getVM(1); VM vm2 = host.getVM(2); VM vm3 = host.getVM(3); setCacheInVMs(vm0, vm1, vm2, vm3); vm0.invoke(helper.getCacheSerializableRunnableForPRAccessorCreate(regionName, redundancy, Portfolio.class)); vm1.invoke( helper.getCacheSerializableRunnableForPRCreate(regionName, redundancy, Portfolio.class)); vm2.invoke( helper.getCacheSerializableRunnableForPRCreate(regionName, redundancy, Portfolio.class)); vm3.invoke( helper.getCacheSerializableRunnableForPRCreate(regionName, redundancy, Portfolio.class)); ArrayList<String> names = new ArrayList<>(); names.add(indexName); 
names.add(rindexName); ArrayList<String> exps = new ArrayList<>(); exps.add(indexedExpression); exps.add(rindexedExpression); ArrayList<String> fromClauses = new ArrayList<>(); fromClauses.add(fromClause); fromClauses.add(rfromClause); vm0.invoke( helper.getCacheSerializableRunnableForDefineIndex(regionName, names, exps, fromClauses)); AsyncInvocation<?>[] asyncInvs = new AsyncInvocation[12]; asyncInvs[0] = vm0.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, 0, stepSize)); asyncInvs[1] = vm1.invokeAsync( helper.getCacheSerializableRunnableForPRRandomOps(regionName, stepSize, (2 * stepSize))); asyncInvs[2] = vm2.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, (2 * stepSize), (3 * stepSize))); asyncInvs[3] = vm3.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, (3 * (stepSize)), totalDataSize)); asyncInvs[4] = vm0.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, 0, stepSize)); asyncInvs[5] = vm1.invokeAsync( helper.getCacheSerializableRunnableForPRRandomOps(regionName, stepSize, (2 * stepSize))); asyncInvs[6] = vm2.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, (2 * stepSize), (3 * stepSize))); asyncInvs[7] = vm3.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, (3 * (stepSize)), totalDataSize)); asyncInvs[8] = vm0.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, 0, stepSize)); asyncInvs[9] = vm1.invokeAsync( helper.getCacheSerializableRunnableForPRRandomOps(regionName, stepSize, (2 * stepSize))); asyncInvs[10] = vm2.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, (2 * stepSize), (3 * stepSize))); asyncInvs[11] = vm3.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, (3 * (stepSize)), totalDataSize)); for (AsyncInvocation<?> inv : asyncInvs) { ThreadUtils.join(inv, 60 * 000); } for (AsyncInvocation<?> inv : asyncInvs) { if 
(inv.exceptionOccurred()) { Assert.fail("Random region operation failed on VM_" + inv.getId(), inv.getException()); } } vm0.invoke(getCacheSerializableRunnableForIndexValidation(regionName, indexName)); vm1.invoke(getCacheSerializableRunnableForIndexValidation(regionName, indexName)); vm2.invoke(getCacheSerializableRunnableForIndexValidation(regionName, indexName)); vm3.invoke(getCacheSerializableRunnableForIndexValidation(regionName, indexName)); } /** * This validator will iterate over RegionEntries and verify their corresponding index key and * entry presence in index valuesToEntriesMap. */ private static class IndexValidator { public IndexValidator() {} private boolean isValidationInProgress; /** * Validation is done in the end of test on all indexes of a region by verifying last on a * region key and verifying state of index based on the last operation. * * @param region being validated for all of its indexes. */ public void validate(Region region) { // Get List of All indexes. Collection<Index> indexes = ((LocalRegion) region).getIndexManager().getIndexes(); // validate each index one by one for (Index index : indexes) { if (region instanceof PartitionedRegion) { validateOnPR((PartitionedRegion) region, (PartitionedIndex) index); } else { validate(region, index); } } } private void validate(Region region, Index index) { // Get index expression String indexExpr = index.getIndexedExpression(); int expectedIndexSize = 0; int expectedNullEntries = 0; int expectedUndefinedEntries = 0; // Lets check if it contains a '.' 
      // Reduce a dotted expression like "pos.secId" to its last segment ("secId") so it can be
      // matched against the hard-coded cases below.
      if (indexExpr.indexOf(".") != -1) {
        indexExpr = indexExpr.substring(indexExpr.indexOf(".") + 1);
      }

      // do a get<indexExpr>() on each region value and verify if the
      // evaluated index key is part of index and has RE as a reference to it
      Collection<RegionEntry> entries = ((LocalRegion) region).entries.regionEntries();
      for (RegionEntry internalEntry : entries) {
        Object value = internalEntry.getValueInVM((LocalRegion) region);

        if (value instanceof CachedDeserializable) {
          value = ((CachedDeserializable) value).getDeserializedValue(region, internalEntry);
        }
        if (indexExpr.equals("ID")) {
          // Compact Range Index
          if (index instanceof CompactRangeIndex) {
            // Ignore invalid values.
            if (value != Token.INVALID && value != Token.TOMBSTONE) {
              LogWriterUtils.getLogWriter().info("Portfolio: " + value);
              Integer ID = ((Portfolio) value).getID();

              // The region entry's key must have a mapping in the index storage.
              assertTrue(
                  "Did not find index key for REgionEntry [key: " + internalEntry.getKey()
                      + " , value: " + value + " ] in index: " + index.getName(),
                  ((CompactRangeIndex) index).getIndexStorage().get(ID) != null);

              // Get Index value for the evaluated index key.
              CloseableIterator<IndexStoreEntry> valuesForKeyIterator = null;
              try {
                valuesForKeyIterator = ((CompactRangeIndex) index).getIndexStorage().get(ID);

                // Check if RegionEntry is present in Index for the key
                // evaluated from
                // region value.
                while (valuesForKeyIterator.hasNext()) {
                  assertTrue(
                      "Did not find index value for REgionEntry [key: " + internalEntry.getKey()
                          + " , value: " + value + " ] in index: " + index.getName()
                          + " For index key: " + ID,
                      (((MemoryIndexStoreEntry) valuesForKeyIterator.next())
                          .getRegionEntry() == internalEntry));
                }
              } finally {
                // Always release the index-storage iterator.
                if (valuesForKeyIterator != null) {
                  valuesForKeyIterator.close();
                }
              }

              // Count the key towards the expected index size, or the NULL bucket.
              if (ID != IndexManager.NULL) {
                expectedIndexSize++;
              } else {
                expectedNullEntries++;
              }
            } else {
              LogWriterUtils.getLogWriter().info(internalEntry.getKey() + "");
              expectedUndefinedEntries++;
            }
          }
        } else if (indexExpr.equals("secId")) {
          if (index instanceof RangeIndex) {
            // Ignore invalid values.
            if (value != Token.INVALID && value != Token.TOMBSTONE) {
              // A RangeIndex over positions maps each position's secId to its region entries.
              Collection<Position> positions = ((Portfolio) value).positions.values();
              for (Position pos : positions) {
                if (pos != null) {
                  LogWriterUtils.getLogWriter()
                      .info("Portfolio: " + value + "Position: " + pos);
                  String secId = pos.secId;
                  assertTrue(
                      "Did not find index key for REgionEntry [key: " + internalEntry.getKey()
                          + " , value: " + value + " ] in index: " + index.getName(),
                      ((RangeIndex) index).valueToEntriesMap.containsKey(secId));

                  // Get Index value for the evaluated index key.
                  Object valuesForKey = ((RangeIndex) index).valueToEntriesMap.get(secId);

                  // Check if RegionEntry is present in Index for the key evaluated from
                  // region value.
                  // The map value is either a single RegionEntry or a RegionEntryToValuesMap,
                  // depending on how many entries share the key.
                  if (!(valuesForKey instanceof RegionEntryToValuesMap)) {
                    assertTrue(
                        "Did not find index value for REgionEntry [key: "
                            + internalEntry.getKey() + " , value: " + value + " ] in index: "
                            + index.getName() + " For index key: " + secId,
                        (valuesForKey == internalEntry));
                  } else {
                    assertTrue(
                        "Did not find index value for REgionEntry [key: "
                            + internalEntry.getKey() + " , value: " + value + " ] in index: "
                            + index.getName() + " For index key: " + secId,
                        (((RegionEntryToValuesMap) valuesForKey).containsEntry(internalEntry)));
                  }

                  if (secId != null) {
                    expectedIndexSize++;
                  } else {
                    expectedNullEntries++;
                  }
                } else {
                  expectedUndefinedEntries++;
                }
              }
            }
          }
        }
      }

      // Validate sizes for index map, null and undefined maps.
      int actualSize = 0;
      if (index instanceof CompactRangeIndex) {
        CloseableIterator<IndexStoreEntry> iter = null;
        try {
          iter = ((CompactRangeIndex) index).getIndexStorage().iterator(null);
          while (iter.hasNext()) {
            Object value = iter.next();
            // getLogWriter().info(
            // "Index Values : " + value);
            actualSize++;
          }
        } finally {
          if (iter != null) {
            iter.close();
          }
        }
      }

      if (index instanceof RangeIndex) {
        // Same single-entry vs. RegionEntryToValuesMap duality as above when counting values.
        for (Object value : ((RangeIndex) index).valueToEntriesMap.values()) {
          if (value instanceof RegionEntry) {
            actualSize++;
          } else {
            // for (Object obj: ((RegionEntryToValuesMap)value).map.values()) {
            // getLogWriter().info("Index Values : "+ obj.toString());
            // }
            actualSize += ((RegionEntryToValuesMap) value).getNumValues();
          }
        }
      }

      IndexStatistics stats = index.getStatistics();
      if (index instanceof CompactRangeIndex) {
        LogWriterUtils.getLogWriter().info(" Actual Size of Index is: " + actualSize);
        /*
         * getLogWriter().info( " Actual Size of Index is: " + actualSize + " Undefined size is: " +
         * ((CompactRangeIndex) index).undefinedMappedEntries.size() + " And NULL size is: " +
         * ((CompactRangeIndex) index).nullMappedEntries.size()); for (Object obj :
         * ((CompactRangeIndex) index).undefinedMappedEntries .toArray()) {
         * getLogWriter().info(((RegionEntry) obj).getKey() + ""); }
         */
        LogWriterUtils.getLogWriter()
            .info(" Expected Size of Index is: " + expectedIndexSize + " Undefined size is: "
                + expectedUndefinedEntries + " And NULL size is: " + expectedNullEntries);
        assertEquals(
            "No of index keys NOT equals the no shown in statistics for index:" + index.getName(),
            ((CompactRangeIndex) index).getIndexStorage().size(), stats.getNumberOfKeys());
      } else {
        LogWriterUtils.getLogWriter()
            .info(" Actual Size of Index is: " + actualSize + " Undefined size is: "
                + ((RangeIndex) index).undefinedMappedEntries.getNumEntries()
                + " And NULL size is: "
                + ((RangeIndex) index).nullMappedEntries.getNumEntries());
        for (Object obj : ((RangeIndex) index).undefinedMappedEntries.map.keySet()) {
          LogWriterUtils.getLogWriter().info(((RegionEntry) obj).getKey() + "");
        }
        LogWriterUtils.getLogWriter()
            .info(" Expected Size of Index is: " + expectedIndexSize + " Undefined size is: "
                + expectedUndefinedEntries + " And NULL size is: " + expectedNullEntries);
        assertEquals(
            "No of index keys NOT equals the no shown in statistics for index:" + index.getName(),
            ((RangeIndex) index).valueToEntriesMap.keySet().size(), stats.getNumberOfKeys());
      }
      // NOTE(review): "Basec" below is a typo ("Based") inside an assertion message; left
      // untouched here because this edit changes documentation only.
      assertEquals(
          "No of index entries NOT equal the No of RegionEntries Basec on statistics for index:"
              + index.getName(),
          (expectedIndexSize + expectedNullEntries), stats.getNumberOfValues());

      assertEquals(
          "No of index entries NOT equals the No of RegionEntries for index:" + index.getName(),
          expectedIndexSize, actualSize);

      GemFireCacheImpl.getInstance().getLogger().fine("Finishing the validation for region: "
          + region.getFullPath() + " and Index: " + index.getName());
    }

    /** Validates one partitioned index by walking every bucket index of the PR. */
    private void validateOnPR(PartitionedRegion pr, PartitionedIndex ind) {
      // Get index expression
      String indexExpr = ind.getIndexedExpression();
      int expectedIndexSize = 0;
      int expectedNullEntries = 0;
      int expectedUndefinedEntries = 0;

      // Lets check if it contains a '.'
if (indexExpr.indexOf(".") != -1) { indexExpr = indexExpr.substring(indexExpr.indexOf(".") + 1); } int actualValueSize = 0; int actualKeySize = 0; for (Object idx : ind.getBucketIndexes()) { Index index = (Index) idx; assertTrue("Bucket stats are different than PR stats for bucket: " + index.getRegion(), index.getStatistics() == ind.getStatistics()); Region region = index.getRegion(); // do a get<indexExpr>() on each region value and verify if the // evaluated index key is part of index and has RE as a reference to it Collection<RegionEntry> entries = ((LocalRegion) region).entries.regionEntries(); for (RegionEntry internalEntry : entries) { Object value = internalEntry.getValueInVM((LocalRegion) region); if (value instanceof CachedDeserializable) { value = ((CachedDeserializable) value).getDeserializedValue(region, internalEntry); } if (indexExpr.equals("ID")) { // Compact Range Index if (index instanceof CompactRangeIndex) { // Ignore invalid values. if (value != Token.INVALID && value != Token.TOMBSTONE) { LogWriterUtils.getLogWriter().info("Portfolio: " + value); Integer ID = ((Portfolio) value).getID(); assertTrue( "Did not find index key for REgionEntry [key: " + internalEntry.getKey() + " , value: " + value + " ] in index: " + index.getName(), ((CompactRangeIndex) index).getIndexStorage().get(ID) != null); // Get Index value for the evaluated index key. CloseableIterator<IndexStoreEntry> valuesForKeyIterator = null; try { valuesForKeyIterator = ((CompactRangeIndex) index).getIndexStorage().get(ID); // Check if RegionEntry is present in Index for the key // evaluated from // region value. 
while (valuesForKeyIterator.hasNext()) { assertTrue( "Did not find index value for REgionEntry [key: " + internalEntry.getKey() + " , value: " + value + " ] in index: " + index.getName() + " For index key: " + ID, (((MemoryIndexStoreEntry) valuesForKeyIterator.next()) .getRegionEntry() == internalEntry)); } } finally { if (valuesForKeyIterator != null) { valuesForKeyIterator.close(); } } if (ID != IndexManager.NULL) { expectedIndexSize++; } else { expectedNullEntries++; } } else { expectedUndefinedEntries++; } } } else if (indexExpr.equals("secId")) { if (index instanceof RangeIndex) { // Ignore invalid values. if (value != Token.INVALID && value != Token.TOMBSTONE) { Collection<Position> positions = ((Portfolio) value).positions.values(); for (Position pos : positions) { if (pos != null) { LogWriterUtils.getLogWriter() .info("Portfolio: " + value + "Position: " + pos); String secId = pos.secId; assertTrue( "Did not find index key for REgionEntry [key: " + internalEntry.getKey() + " , value: " + value + " ] in index: " + index.getName(), ((RangeIndex) index).valueToEntriesMap.containsKey(secId)); // Get Index value for the evaluated index key. Object valuesForKey = ((RangeIndex) index).valueToEntriesMap.get(secId); // Check if RegionEntry is present in Index for the key evaluated from // region value. 
if (!(valuesForKey instanceof RegionEntryToValuesMap)) { assertTrue( "Did not find index value for REgionEntry [key: " + internalEntry.getKey() + " , value: " + value + " ] in index: " + index.getName() + " For index key: " + secId, (valuesForKey == internalEntry)); } else { assertTrue( "Did not find index value for REgionEntry [key: " + internalEntry.getKey() + " , value: " + value + " ] in index: " + index.getName() + " For index key: " + secId, (((RegionEntryToValuesMap) valuesForKey).containsEntry(internalEntry))); } if (secId != null) { expectedIndexSize++; } else { expectedNullEntries++; } } else { expectedUndefinedEntries++; } } } } } } // Validate sizes for index map, null and undefined maps. if (index instanceof CompactRangeIndex) { CloseableIterator<IndexStoreEntry> iter = null; try { iter = ((CompactRangeIndex) index).getIndexStorage().iterator(null); while (iter.hasNext()) { LogWriterUtils.getLogWriter() .info("Index Values : " + iter.next().getDeserializedValue()); actualValueSize++; } } finally { if (iter != null) { iter.close(); } } } if (index instanceof RangeIndex) { for (Object value : ((RangeIndex) index).valueToEntriesMap.values()) { if (value instanceof RegionEntry) { actualValueSize++; } else { actualValueSize += ((RegionEntryToValuesMap) value).getNumValues(); } } } if (index instanceof CompactRangeIndex) { actualKeySize += ((CompactRangeIndex) index).getIndexStorage().size(); } else { actualKeySize += ((RangeIndex) index).valueToEntriesMap.keySet().size(); } } assertEquals( "No of index entries NOT equals the No of RegionENtries NOT based on stats for index:" + ind.getName(), expectedIndexSize, actualValueSize); IndexStatistics stats = ind.getStatistics(); assertEquals( "No of index entries NOT equals the No of RegionENtries based on statistics for index:" + ind.getName(), (expectedIndexSize + expectedNullEntries), stats.getNumberOfValues()); GemFireCacheImpl.getInstance().getLogger().fine("Finishing the validation for region: " + 
pr.getFullPath() + " and Index: " + ind.getName()); } } }
apache/hadoop
36,387
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KDiag.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.security; import org.apache.commons.io.IOUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.io.Text; import org.apache.hadoop.security.authentication.util.KerberosName; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; import org.apache.hadoop.util.ExitUtil; import org.apache.hadoop.util.Shell; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import org.apache.kerby.kerberos.kerb.keytab.Keytab; import org.apache.kerby.kerberos.kerb.keytab.KeytabEntry; import org.apache.kerby.kerberos.kerb.type.base.EncryptionKey; import org.apache.kerby.kerberos.kerb.type.base.PrincipalName; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.crypto.Cipher; import java.io.Closeable; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.PrintWriter; import java.lang.reflect.InvocationTargetException; import java.net.InetAddress; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.security.NoSuchAlgorithmException; 
import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.LinkedList; import java.util.List; import java.util.regex.Pattern; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.*; import static org.apache.hadoop.security.UserGroupInformation.*; import static org.apache.hadoop.security.authentication.util.KerberosUtil.*; import static org.apache.hadoop.util.StringUtils.popOption; import static org.apache.hadoop.util.StringUtils.popOptionWithArgument; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_TOKEN_FILES; /** * Kerberos diagnostics * * This operation expands some of the diagnostic output of the security code, * but not all. For completeness * * Set the environment variable {@code HADOOP_JAAS_DEBUG=true} * Set the log level for {@code org.apache.hadoop.security=DEBUG} */ public class KDiag extends Configured implements Tool, Closeable { private static final Logger LOG = LoggerFactory.getLogger(KDiag.class); /** * Location of the kerberos ticket cache as passed down via an environment * variable. This is what kinit will use by default: {@value} */ public static final String KRB5_CCNAME = "KRB5CCNAME"; /** * Location of main kerberos configuration file as passed down via an * environment variable. 
*/ public static final String KRB5_CONFIG = "KRB5_CONFIG"; public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf"; public static final String JAVA_SECURITY_KRB5_REALM = "java.security.krb5.realm"; public static final String JAVA_SECURITY_KRB5_KDC_ADDRESS = "java.security.krb5.kdc"; public static final String SUN_SECURITY_KRB5_DEBUG = "sun.security.krb5.debug"; public static final String SUN_SECURITY_SPNEGO_DEBUG = "sun.security.spnego.debug"; public static final String SUN_SECURITY_JAAS_FILE = "java.security.auth.login.config"; public static final String KERBEROS_KINIT_COMMAND = "hadoop.kerberos.kinit.command"; public static final String HADOOP_AUTHENTICATION_IS_DISABLED = "Hadoop authentication is disabled"; public static final String UNSET = "(unset)"; /** * String seen in {@code getDefaultRealm()} exceptions if the user has * no realm: {@value}. */ public static final String NO_DEFAULT_REALM = "Cannot locate default realm"; /** * The exit code for a failure of the diagnostics: 41 == HTTP 401 == unauth. */ public static final int KDIAG_FAILURE = 41; public static final String DFS_DATA_TRANSFER_SASLPROPERTIES_RESOLVER_CLASS = "dfs.data.transfer.saslproperties.resolver.class"; public static final String DFS_DATA_TRANSFER_PROTECTION = "dfs.data.transfer.protection"; public static final String ETC_KRB5_CONF = "/etc/krb5.conf"; public static final String ETC_NTP = "/etc/ntp.conf"; public static final String HADOOP_JAAS_DEBUG = "HADOOP_JAAS_DEBUG"; private PrintWriter out; private File keytab; private String principal; private long minKeyLength = 256; private boolean securityRequired; private boolean nofail = false; private boolean nologin = false; private boolean jaas = false; private boolean checkShortName = false; /** * A pattern that recognizes simple/non-simple names. 
Per KerberosName */ private static final Pattern nonSimplePattern = Pattern.compile("[/@]"); /** * Flag set to true if a {@link #verify(boolean, String, String, Object...)} * probe failed. */ private boolean probeHasFailed = false; public static final String CAT_CONFIG = "CONFIG"; public static final String CAT_JAAS = "JAAS"; public static final String CAT_JVM = "JVM"; public static final String CAT_KERBEROS = "KERBEROS"; public static final String CAT_LOGIN = "LOGIN"; public static final String CAT_OS = "JAAS"; public static final String CAT_SASL = "SASL"; public static final String CAT_UGI = "UGI"; public static final String CAT_TOKEN = "TOKEN"; public static final String ARG_KEYLEN = "--keylen"; public static final String ARG_KEYTAB = "--keytab"; public static final String ARG_JAAS = "--jaas"; public static final String ARG_NOFAIL = "--nofail"; public static final String ARG_NOLOGIN = "--nologin"; public static final String ARG_OUTPUT = "--out"; public static final String ARG_PRINCIPAL = "--principal"; public static final String ARG_RESOURCE = "--resource"; public static final String ARG_SECURE = "--secure"; public static final String ARG_VERIFYSHORTNAME = "--verifyshortname"; @SuppressWarnings("IOResourceOpenedButNotSafelyClosed") public KDiag(Configuration conf, PrintWriter out, File keytab, String principal, long minKeyLength, boolean securityRequired) { super(conf); this.keytab = keytab; this.principal = principal; this.out = out; this.minKeyLength = minKeyLength; this.securityRequired = securityRequired; } public KDiag() { } @Override public void close() throws IOException { flush(); if (out != null) { out.close(); } } @Override public int run(String[] argv) throws Exception { List<String> args = new LinkedList<>(Arrays.asList(argv)); String keytabName = popOptionWithArgument(ARG_KEYTAB, args); if (keytabName != null) { keytab = new File(keytabName); } principal = popOptionWithArgument(ARG_PRINCIPAL, args); String outf = popOptionWithArgument(ARG_OUTPUT, 
args); String mkl = popOptionWithArgument(ARG_KEYLEN, args); if (mkl != null) { minKeyLength = Integer.parseInt(mkl); } securityRequired = popOption(ARG_SECURE, args); nofail = popOption(ARG_NOFAIL, args); jaas = popOption(ARG_JAAS, args); nologin = popOption(ARG_NOLOGIN, args); checkShortName = popOption(ARG_VERIFYSHORTNAME, args); // look for list of resources String resource; while (null != (resource = popOptionWithArgument(ARG_RESOURCE, args))) { // loading a resource LOG.info("Loading resource {}", resource); try (InputStream in = getClass().getClassLoader().getResourceAsStream(resource)) { if (verify(in != null, CAT_CONFIG, "No resource %s", resource)) { Configuration.addDefaultResource(resource); } } } // look for any leftovers if (!args.isEmpty()) { println("Unknown arguments in command:"); for (String s : args) { println(" \"%s\"", s); } println(); println(usage()); return -1; } if (outf != null) { println("Printing output to %s", outf); out = new PrintWriter(new File(outf), "UTF-8"); } execute(); return probeHasFailed ? KDIAG_FAILURE : 0; } private String usage() { return "KDiag: Diagnose Kerberos Problems\n" + arg("-D", "key=value", "Define a configuration option") + arg(ARG_JAAS, "", "Require a JAAS file to be defined in " + SUN_SECURITY_JAAS_FILE) + arg(ARG_KEYLEN, "<keylen>", "Require a minimum size for encryption keys supported by the JVM." 
+ " Default value : "+ minKeyLength) + arg(ARG_KEYTAB, "<keytab> " + ARG_PRINCIPAL + " <principal>", "Login from a keytab as a specific principal") + arg(ARG_NOFAIL, "", "Do not fail on the first problem") + arg(ARG_NOLOGIN, "", "Do not attempt to log in") + arg(ARG_OUTPUT, "<file>", "Write output to a file") + arg(ARG_RESOURCE, "<resource>", "Load an XML configuration resource") + arg(ARG_SECURE, "", "Require the hadoop configuration to be secure") + arg(ARG_VERIFYSHORTNAME, ARG_PRINCIPAL + " <principal>", "Verify the short name of the specific principal does not contain '@' or '/'"); } private String arg(String name, String params, String meaning) { return String.format(" [%s%s%s] : %s", name, (!params.isEmpty() ? " " : ""), params, meaning) + ".\n"; } /** * Execute diagnostics. * <p> * Things it would be nice if UGI made accessible * <ol> * <li>A way to enable JAAS debug programatically</li> * <li>Access to the TGT</li> * </ol> * @return true if security was enabled and all probes were successful * @throws KerberosDiagsFailure explicitly raised failure * @throws Exception other security problems */ @SuppressWarnings("deprecation") public boolean execute() throws Exception { title("Kerberos Diagnostics scan at %s", new Date(System.currentTimeMillis())); // check that the machine has a name println("Hostname = %s", InetAddress.getLocalHost().getCanonicalHostName()); println("%s = %d", ARG_KEYLEN, minKeyLength); println("%s = %s", ARG_KEYTAB, keytab); println("%s = %s", ARG_PRINCIPAL, principal); println("%s = %s", ARG_VERIFYSHORTNAME, checkShortName); // Fail fast on a JVM without JCE installed. 
validateKeyLength(); // look at realm println("JVM Kerberos Login Module = %s", getKrb5LoginModuleName()); title("Core System Properties"); for (String prop : new String[]{ "user.name", "java.version", "java.vendor", JAVA_SECURITY_KRB5_CONF, JAVA_SECURITY_KRB5_REALM, JAVA_SECURITY_KRB5_KDC_ADDRESS, SUN_SECURITY_KRB5_DEBUG, SUN_SECURITY_SPNEGO_DEBUG, SUN_SECURITY_JAAS_FILE }) { printSysprop(prop); } endln(); title("All System Properties"); ArrayList<String> propList = new ArrayList<>( System.getProperties().stringPropertyNames()); Collections.sort(propList, String.CASE_INSENSITIVE_ORDER); for (String s : propList) { printSysprop(s); } endln(); title("Environment Variables"); for (String env : new String[]{ HADOOP_JAAS_DEBUG, KRB5_CCNAME, KRB5_CONFIG, HADOOP_USER_NAME, HADOOP_PROXY_USER, HADOOP_TOKEN_FILE_LOCATION, "HADOOP_SECURE_LOG", "HADOOP_OPTS", "HADOOP_CLIENT_OPTS", }) { printEnv(env); } endln(); title("Configuration Options"); for (String prop : new String[]{ KERBEROS_KINIT_COMMAND, HADOOP_SECURITY_AUTHENTICATION, HADOOP_SECURITY_AUTHORIZATION, "hadoop.kerberos.min.seconds.before.relogin", // not in 2.6 "hadoop.security.dns.interface", // not in 2.6 "hadoop.security.dns.nameserver", // not in 2.6 HADOOP_RPC_PROTECTION, HADOOP_SECURITY_SASL_PROPS_RESOLVER_CLASS, HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_KEY_PREFIX, HADOOP_SECURITY_GROUP_MAPPING, "hadoop.security.impersonation.provider.class", // not in 2.6 DFS_DATA_TRANSFER_PROTECTION, // HDFS DFS_DATA_TRANSFER_SASLPROPERTIES_RESOLVER_CLASS // HDFS }) { printConfOpt(prop); } // check that authentication is enabled Configuration conf = getConf(); if (isSimpleAuthentication(conf)) { println(HADOOP_AUTHENTICATION_IS_DISABLED); failif(securityRequired, CAT_CONFIG, HADOOP_AUTHENTICATION_IS_DISABLED); // no security, warn LOG.warn("Security is not enabled for the Hadoop cluster"); } else { if (isSimpleAuthentication(new Configuration())) { LOG.warn("The default cluster security is insecure"); failif(securityRequired, 
CAT_CONFIG, HADOOP_AUTHENTICATION_IS_DISABLED); } } // now the big test: login, then try again boolean krb5Debug = getAndSet(SUN_SECURITY_KRB5_DEBUG); boolean spnegoDebug = getAndSet(SUN_SECURITY_SPNEGO_DEBUG); try { UserGroupInformation.setConfiguration(conf); validateHadoopTokenFiles(conf); validateKrb5File(); printDefaultRealm(); validateSasl(HADOOP_SECURITY_SASL_PROPS_RESOLVER_CLASS); if (conf.get(DFS_DATA_TRANSFER_SASLPROPERTIES_RESOLVER_CLASS) != null) { validateSasl(DFS_DATA_TRANSFER_SASLPROPERTIES_RESOLVER_CLASS); } validateKinitExecutable(); validateJAAS(jaas); validateNTPConf(); if (checkShortName) { validateShortName(); } if (!nologin) { title("Logging in"); if (keytab != null) { dumpKeytab(keytab); loginFromKeytab(); } else { UserGroupInformation loginUser = getLoginUser(); dumpUGI("Log in user", loginUser); validateUGI("Login user", loginUser); println("Ticket based login: %b", isLoginTicketBased()); println("Keytab based login: %b", isLoginKeytabBased()); } } return true; } finally { // restore original system properties System.setProperty(SUN_SECURITY_KRB5_DEBUG, Boolean.toString(krb5Debug)); System.setProperty(SUN_SECURITY_SPNEGO_DEBUG, Boolean.toString(spnegoDebug)); } } /** * Is the authentication method of this configuration "simple"? * @param conf configuration to check * @return true if auth is simple (i.e. not kerberos) */ protected boolean isSimpleAuthentication(Configuration conf) { return SecurityUtil.getAuthenticationMethod(conf) .equals(AuthenticationMethod.SIMPLE); } /** * Fail fast on a JVM without JCE installed. * * This is a recurrent problem * (that is: it keeps creeping back with JVM updates); * a fast failure is the best tactic. * @throws NoSuchAlgorithmException when a particular cryptographic algorithm is * requested but is not available in the environment. 
*/ protected void validateKeyLength() throws NoSuchAlgorithmException { int aesLen = Cipher.getMaxAllowedKeyLength("AES"); println("Maximum AES encryption key length %d bits", aesLen); verify(minKeyLength <= aesLen, CAT_JVM, "Java Cryptography Extensions are not installed on this JVM." + " Maximum supported key length %s - minimum required %d", aesLen, minKeyLength); } /** * Verify whether auth_to_local rules transform a principal name * <p> * Having a local user name "bar@foo.com" may be harmless, so it is noted at * info. However if what was intended is a transformation to "bar" * it can be difficult to debug, hence this check. */ protected void validateShortName() { failif(principal == null, CAT_KERBEROS, "No principal defined"); try { KerberosName kn = new KerberosName(principal); String result = kn.getShortName(); if (nonSimplePattern.matcher(result).find()) { warn(CAT_KERBEROS, principal + " short name: " + result + " still contains @ or /"); } } catch (IOException e) { throw new KerberosDiagsFailure(CAT_KERBEROS, e, "Failed to get short name for " + principal, e); } catch (IllegalArgumentException e) { error(CAT_KERBEROS, "KerberosName(" + principal + ") failed: %s\n%s", e, StringUtils.stringifyException(e)); } } /** * Get the default realm. * <p> * Not having a default realm may be harmless, so is noted at info. * All other invocation failures are downgraded to warn, as * follow-on actions may still work. 
* Failure to invoke the method via introspection is considered a failure, * as it's a sign of JVM compatibility issues that may have other * consequences */ protected void printDefaultRealm() { try { String defaultRealm = getDefaultRealm(); println("Default Realm = %s", defaultRealm); if (defaultRealm == null) { warn(CAT_KERBEROS, "Host has no default realm"); } } catch (ClassNotFoundException | IllegalAccessException | NoSuchMethodException e) { throw new KerberosDiagsFailure(CAT_JVM, e, "Failed to invoke krb5.Config.getDefaultRealm: %s: " +e, e); } catch (InvocationTargetException e) { Throwable cause = e.getCause() != null ? e.getCause() : e; if (cause.toString().contains(NO_DEFAULT_REALM)) { // exception raised if there is no default realm. This is not // always a problem, so downgrade to a message. warn(CAT_KERBEROS, "Host has no default realm"); LOG.debug(cause.toString(), cause); } else { error(CAT_KERBEROS, "Kerberos.getDefaultRealm() failed: %s\n%s", cause, StringUtils.stringifyException(cause)); } } } /** * Validate that hadoop.token.files (if specified) exist and are valid. 
* @throws ClassNotFoundException * @throws SecurityException * @throws NoSuchMethodException * @throws KerberosDiagsFailure */ private void validateHadoopTokenFiles(Configuration conf) throws ClassNotFoundException, KerberosDiagsFailure, NoSuchMethodException, SecurityException { title("Locating Hadoop token files"); String tokenFileLocation = System.getProperty(HADOOP_TOKEN_FILES); if(tokenFileLocation != null) { println("Found " + HADOOP_TOKEN_FILES + " in system properties : " + tokenFileLocation); } if(conf.get(HADOOP_TOKEN_FILES) != null) { println("Found " + HADOOP_TOKEN_FILES + " in hadoop configuration : " + conf.get(HADOOP_TOKEN_FILES)); if(System.getProperty(HADOOP_TOKEN_FILES) != null) { println(HADOOP_TOKEN_FILES + " in the system properties overrides the" + " one specified in hadoop configuration"); } else { tokenFileLocation = conf.get(HADOOP_TOKEN_FILES); } } if (tokenFileLocation != null) { for (String tokenFileName: StringUtils.getTrimmedStrings(tokenFileLocation)) { if (tokenFileName.length() > 0) { File tokenFile = new File(tokenFileName); verifyFileIsValid(tokenFile, CAT_TOKEN, "token"); verify(tokenFile, conf, CAT_TOKEN, "token"); } } } } /** * Locate the {@code krb5.conf} file and dump it. * * No-op on windows. * @throws IOException problems reading the file. 
*/ private void validateKrb5File() throws IOException { if (!Shell.WINDOWS) { title("Locating Kerberos configuration file"); String krbPath = ETC_KRB5_CONF; String jvmKrbPath = System.getProperty(JAVA_SECURITY_KRB5_CONF); if (jvmKrbPath != null && !jvmKrbPath.isEmpty()) { println("Setting kerberos path from sysprop %s: \"%s\"", JAVA_SECURITY_KRB5_CONF, jvmKrbPath); krbPath = jvmKrbPath; } String krb5name = System.getenv(KRB5_CONFIG); if (krb5name != null) { println("Setting kerberos path from environment variable %s: \"%s\"", KRB5_CONFIG, krb5name); krbPath = krb5name; if (jvmKrbPath != null) { println("Warning - both %s and %s were set - %s takes priority", JAVA_SECURITY_KRB5_CONF, KRB5_CONFIG, KRB5_CONFIG); } } File krbFile = new File(krbPath); println("Kerberos configuration file = %s", krbFile); dump(krbFile); endln(); } } /** * Dump a keytab: list all principals. * * @param keytabFile the keytab file * @throws IOException IO problems */ private void dumpKeytab(File keytabFile) throws IOException { title("Examining keytab %s", keytabFile); File kt = keytabFile.getCanonicalFile(); verifyFileIsValid(kt, CAT_KERBEROS, "keytab"); Keytab loadKeytab = Keytab.loadKeytab(kt); List<PrincipalName> principals = loadKeytab.getPrincipals(); println("keytab principal count: %d", principals.size()); int entrySize = 0; for (PrincipalName princ : principals) { List<KeytabEntry> entries = loadKeytab.getKeytabEntries(princ); entrySize = entrySize + entries.size(); for (KeytabEntry entry : entries) { EncryptionKey key = entry.getKey(); println(" %s: version=%d expires=%s encryption=%s", entry.getPrincipal(), entry.getKvno(), entry.getTimestamp(), key.getKeyType()); } } println("keytab entry count: %d", entrySize); endln(); } /** * Log in from a keytab, dump the UGI, validate it, then try and log in again. * * That second-time login catches JVM/Hadoop compatibility problems. 
* @throws IOException Keytab loading problems */ private void loginFromKeytab() throws IOException { UserGroupInformation ugi; String identity; if (keytab != null) { File kt = keytab.getCanonicalFile(); println("Using keytab %s principal %s", kt, principal); identity = principal; failif(principal == null, CAT_KERBEROS, "No principal defined"); ugi = loginUserFromKeytabAndReturnUGI(principal, kt.getPath()); dumpUGI(identity, ugi); validateUGI(principal, ugi); title("Attempting to relogin"); try { // package scoped -hence the reason why this class must be in the // hadoop.security package setShouldRenewImmediatelyForTests(true); // attempt a new login ugi.reloginFromKeytab(); } catch (IllegalAccessError e) { // if you've built this class into an independent JAR, package-access // may fail. Downgrade warn(CAT_UGI, "Failed to reset UGI -and so could not try to relogin"); LOG.debug("Failed to reset UGI: {}", e, e); } } else { println("No keytab: attempting to log in is as current user"); } } /** * Dump a UGI. * * @param title title of this section * @param ugi UGI to dump * @throws IOException */ private void dumpUGI(String title, UserGroupInformation ugi) throws IOException { title(title); println("UGI instance = %s", ugi); println("Has kerberos credentials: %b", ugi.hasKerberosCredentials()); println("Authentication method: %s", ugi.getAuthenticationMethod()); println("Real Authentication method: %s", ugi.getRealAuthenticationMethod()); title("Group names"); for (String name : ugi.getGroupNames()) { println(name); } title("Credentials"); List<Text> secretKeys = ugi.getCredentials().getAllSecretKeys(); title("Secret keys"); if (!secretKeys.isEmpty()) { for (Text secret: secretKeys) { println("%s", secret); } } else { println("(none)"); } dumpTokens(ugi); } /** * Validate the UGI: verify it is kerberized. 
* @param messagePrefix message in exceptions * @param user user to validate */ private void validateUGI(String messagePrefix, UserGroupInformation user) { if (verify(user.getAuthenticationMethod() == AuthenticationMethod.KERBEROS, CAT_LOGIN, "User %s is not authenticated by Kerberos", user)) { verify(user.hasKerberosCredentials(), CAT_LOGIN, "%s: No kerberos credentials for %s", messagePrefix, user); verify(user.getAuthenticationMethod() != null, CAT_LOGIN, "%s: Null AuthenticationMethod for %s", messagePrefix, user); } } /** * A cursory look at the {@code kinit} executable. * * If it is an absolute path: it must exist with a size > 0. * If it is just a command, it has to be on the path. There's no check * for that -but the PATH is printed out. */ private void validateKinitExecutable() { String kinit = getConf().getTrimmed(KERBEROS_KINIT_COMMAND, ""); if (!kinit.isEmpty()) { File kinitPath = new File(kinit); println("%s = %s", KERBEROS_KINIT_COMMAND, kinitPath); if (kinitPath.isAbsolute()) { verifyFileIsValid(kinitPath, CAT_KERBEROS, KERBEROS_KINIT_COMMAND); } else { println("Executable %s is relative -must be on the PATH", kinit); printEnv("PATH"); } } } /** * Try to load the SASL resolver. * @param saslPropsResolverKey key for the SASL resolver */ private void validateSasl(String saslPropsResolverKey) { title("Resolving SASL property %s", saslPropsResolverKey); String saslPropsResolver = getConf().getTrimmed(saslPropsResolverKey); try { Class<? extends SaslPropertiesResolver> resolverClass = getConf().getClass( saslPropsResolverKey, SaslPropertiesResolver.class, SaslPropertiesResolver.class); println("Resolver is %s", resolverClass); } catch (RuntimeException e) { throw new KerberosDiagsFailure(CAT_SASL, e, "Failed to load %s class %s", saslPropsResolverKey, saslPropsResolver); } } /** * Validate any JAAS entry referenced in the {@link #SUN_SECURITY_JAAS_FILE} * property. 
* @param jaasRequired is JAAS required */ private void validateJAAS(boolean jaasRequired) throws IOException { String jaasFilename = System.getProperty(SUN_SECURITY_JAAS_FILE); if (jaasRequired) { verify(jaasFilename != null, CAT_JAAS, "No JAAS file specified in " + SUN_SECURITY_JAAS_FILE); } if (jaasFilename != null) { title("JAAS"); File jaasFile = new File(jaasFilename); println("JAAS file is defined in %s: %s", SUN_SECURITY_JAAS_FILE, jaasFile); verifyFileIsValid(jaasFile, CAT_JAAS, "JAAS file defined in " + SUN_SECURITY_JAAS_FILE); dump(jaasFile); endln(); } } private void validateNTPConf() throws IOException { if (!Shell.WINDOWS) { File ntpfile = new File(ETC_NTP); if (ntpfile.exists() && verifyFileIsValid(ntpfile, CAT_OS, "NTP file: " + ntpfile)) { title("NTP"); dump(ntpfile); endln(); } } } /** * Verify that a file is valid: it is a file, non-empty and readable. * @param file file * @param category category for exceptions * @param text text message * @return true if the validation held; false if it did not <i>and</i> * {@link #nofail} has disabled raising exceptions. */ private boolean verifyFileIsValid(File file, String category, String text) { return verify(file.exists(), category, "%s file does not exist: %s", text, file) && verify(file.isFile(), category, "%s path does not refer to a file: %s", text, file) && verify(file.length() != 0, category, "%s file is empty: %s", text, file) && verify(file.canRead(), category, "%s file is not readable: %s", text, file); } /** * Dump all tokens of a UGI. * @param ugi UGI to examine */ public void dumpTokens(UserGroupInformation ugi) { Collection<Token<? extends TokenIdentifier>> tokens = ugi.getCredentials().getAllTokens(); title("Token Count: %d", tokens.size()); for (Token<? extends TokenIdentifier> token : tokens) { println("Token %s", token.getKind()); } endln(); } /** * Set the System property to true; return the old value for caching. 
 * Read a JVM system property as a boolean (true only if its value is
 * exactly {@code "true"}), then set it to {@code "true"}.
 *
 * @param sysprop property
 * @return the previous value
 */
private boolean getAndSet(String sysprop) {
  boolean old = Boolean.getBoolean(sysprop);
  System.setProperty(sysprop, "true");
  return old;
}

/**
 * Flush all active output channels, including {@code System.err},
 * so as to stay in sync with any JRE log messages.
 */
private void flush() {
  if (out != null) {
    out.flush();
  } else {
    System.out.flush();
  }
  System.err.flush();
}

/**
 * Print a line of output. This goes to the output file if one is set,
 * otherwise to {@code System.out}. The output is flushed before and
 * after, to try and stay in sync with JRE logging.
 *
 * @param format format string
 * @param args any arguments
 */
private void println(String format, Object... args) {
  flush();
  String msg = String.format(format, args);
  if (out != null) {
    out.println(msg);
  } else {
    System.out.println(msg);
  }
  flush();
}

/**
 * Print a new line.
 */
private void println() {
  println("");
}

/**
 * Print a divider line at the end of a section.
 */
private void endln() {
  println();
  println("-----");
}

/**
 * Print a title entry, surrounded by blank lines.
 *
 * @param format format string
 * @param args any arguments
 */
private void title(String format, Object... args) {
  println();
  println();
  println("== " + String.format(format, args) + " ==");
  println();
}

/**
 * Print a system property, or {@link #UNSET} if unset.
 *
 * @param property property to print
 */
private void printSysprop(String property) {
  println("%s = \"%s\"", property, System.getProperty(property, UNSET));
}

/**
 * Print a configuration option from {@code getConf()}, or
 * {@link #UNSET} if unset.
 *
 * @param option option to print
 */
private void printConfOpt(String option) {
  println("%s = \"%s\"", option, getConf().get(option, UNSET));
}

/**
 * Print an environment variable's name and value; printing
 * {@link #UNSET} if it is not set.
 *
 * @param variable environment variable
 */
private void printEnv(String variable) {
  String env = System.getenv(variable);
  println("%s = \"%s\"", variable, env != null ? env : UNSET);
}

/**
 * Dump a file to the output channel, line by line; the file is
 * decoded as UTF-8.
 *
 * @param file file to dump
 * @throws IOException IO problems
 */
private void dump(File file) throws IOException {
  try (InputStream in = Files.newInputStream(file.toPath())) {
    for (String line : IOUtils.readLines(in, StandardCharsets.UTF_8)) {
      println("%s", line);
    }
  }
}

/**
 * Format and raise a failure; the error is printed before the
 * exception is thrown.
 *
 * @param category category for exception
 * @param message string formatting message
 * @param args any arguments for the formatting
 * @throws KerberosDiagsFailure always, containing the formatted text
 */
private void fail(String category, String message, Object... args)
    throws KerberosDiagsFailure {
  error(category, message, args);
  throw new KerberosDiagsFailure(category, message, args);
}

/**
 * Assert that a condition must hold.
 *
 * If not, an exception is raised, or, if {@link #nofail} is set,
 * an error will be logged and the method return false.
 *
 * @param condition condition which must hold
 * @param category category for exception
 * @param message string formatting message
 * @param args any arguments for the formatting
 * @return true if the verification succeeded, false if it failed but
 * an exception was not raised.
 * @throws KerberosDiagsFailure containing the formatted text
 * if the condition was not met and {@link #nofail} is unset
 */
private boolean verify(boolean condition, String category, String message, Object... args)
    throws KerberosDiagsFailure {
  if (!condition) {
    // condition not met: record it, then fail or report
    probeHasFailed = true;
    if (!nofail) {
      fail(category, message, args);
    } else {
      error(category, message, args);
    }
    return false;
  } else {
    // condition is met
    return true;
  }
}

/**
 * Verify that a token file contains valid Credentials.
 *
 * If it does not, an exception is raised, or, if {@link #nofail} is set,
 * an error is logged and the method returns false.
 *
 * @param tokenFile token file to read
 * @param conf configuration used when reading the token storage file
 * @param category category for exception
 * @param message error message to report on failure
 * @return true if the file was read as credentials, false if it failed
 * but an exception was not raised
 * @throws KerberosDiagsFailure if the file was not valid and
 * {@link #nofail} is unset
 */
private boolean verify(File tokenFile, Configuration conf, String category, String message)
    throws KerberosDiagsFailure {
  try {
    Credentials.readTokenStorageFile(tokenFile, conf);
  } catch(Exception e) {
    // the caught exception's details are discarded; only the supplied
    // message is reported.
    // NOTE(review): unlike verify(boolean, ...), this overload does not
    // set probeHasFailed — confirm that is intentional.
    if (!nofail) {
      fail(category, message);
    } else {
      error(category, message);
    }
    return false;
  }
  return true;
}

/**
 * Print a message as an error.
 *
 * @param category error category
 * @param message format string
 * @param args list of arguments
 */
private void error(String category, String message, Object...args) {
  println("ERROR: %s: %s", category, String.format(message, args));
}

/**
 * Print a message as a warning.
 *
 * @param category error category
 * @param message format string
 * @param args list of arguments
 */
private void warn(String category, String message, Object...args) {
  println("WARNING: %s: %s", category, String.format(message, args));
}

/**
 * Conditional failure with string formatted arguments.
 * There is no check of the {@link #nofail} value: if the condition
 * holds, this always throws.
 *
 * @param condition failure condition
 * @param category category for exception
 * @param message string formatting message
 * @param args any arguments for the formatting
 * @throws KerberosDiagsFailure containing the formatted text
 * if the condition was met
 */
private void failif(boolean condition, String category, String message, Object... args)
    throws KerberosDiagsFailure {
  if (condition) {
    fail(category, message, args);
  }
}

/**
 * Inner entry point, with no logging or system exits.
 * The KDiag instance is closed when the run completes.
 *
 * @param conf configuration
 * @param argv argument list
 * @return the exit code returned by the tool run
 * @throws Exception on any failure
 */
public static int exec(Configuration conf, String... argv) throws Exception {
  try(KDiag kdiag = new KDiag()) {
    return ToolRunner.run(conf, kdiag, argv);
  }
}

/**
 * Main entry point. Exits the JVM with the tool's exit code; diagnostic
 * failures exit via {@code ExitUtil}, any other exception halts with -1.
 *
 * @param argv args list
 */
public static void main(String[] argv) {
  try {
    ExitUtil.terminate(exec(new Configuration(), argv));
  } catch (ExitUtil.ExitException e) {
    // an explicit exit: use its status code without a stack trace
    LOG.error(e.toString());
    System.exit(e.status);
  } catch (Exception e) {
    // unexpected failure: log with stack trace, then halt
    LOG.error(e.toString(), e);
    ExitUtil.halt(-1, e);
  }
}

/**
 * Diagnostics failures return the exit code 41, "unauthorized".
 *
 * They have a category, initially for testing: the category can be
 * validated without having to match on the entire string.
 */
public static class KerberosDiagsFailure extends ExitUtil.ExitException {

  private final String category;

  /**
   * Create a failure; the message is prefixed with the category.
   *
   * @param category failure category
   * @param message failure text
   */
  public KerberosDiagsFailure(String category, String message) {
    super(KDIAG_FAILURE, category + ": " + message);
    this.category = category;
  }

  /**
   * Create a failure from a format string and arguments.
   *
   * @param category failure category
   * @param message format string
   * @param args formatting arguments
   */
  public KerberosDiagsFailure(String category, String message, Object... args) {
    this(category, String.format(message, args));
  }

  /**
   * Create a failure with an underlying cause.
   *
   * @param category failure category
   * @param throwable cause of the failure
   * @param message format string
   * @param args formatting arguments
   */
  public KerberosDiagsFailure(String category, Throwable throwable,
      String message, Object... args) {
    this(category, message, args);
    initCause(throwable);
  }

  /** @return the failure category. */
  public String getCategory() {
    return category;
  }
}
}
googleapis/google-cloud-java
36,642
java-retail/proto-google-cloud-retail-v2/src/main/java/com/google/cloud/retail/v2/PurgeUserEventsRequest.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/retail/v2/purge_config.proto
// Protobuf Java Version: 3.25.8
// NOTE(review): machine-generated file; hand edits are lost on regeneration.
// Fixes belong in purge_config.proto or the protoc toolchain.

package com.google.cloud.retail.v2;

/**
 * Request message for the PurgeUserEvents method.
 *
 * <p>Protobuf type {@code google.cloud.retail.v2.PurgeUserEventsRequest}
 */
public final class PurgeUserEventsRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.retail.v2.PurgeUserEventsRequest)
    PurgeUserEventsRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use PurgeUserEventsRequest.newBuilder() to construct.
  private PurgeUserEventsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor used for the default instance; string fields default to "".
  private PurgeUserEventsRequest() {
    parent_ = "";
    filter_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new PurgeUserEventsRequest();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.retail.v2.PurgeConfigProto
        .internal_static_google_cloud_retail_v2_PurgeUserEventsRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.retail.v2.PurgeConfigProto
        .internal_static_google_cloud_retail_v2_PurgeUserEventsRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.retail.v2.PurgeUserEventsRequest.class,
            com.google.cloud.retail.v2.PurgeUserEventsRequest.Builder.class);
  }

  public static final int PARENT_FIELD_NUMBER = 1;

  // Holds either a String or a lazily-decoded ByteString (see getParent()).
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";

  /**
   * Required. The resource name of the catalog under which the events are
   * created. The format is
   * {@code projects/${projectId}/locations/global/catalogs/${catalogId}}.
   *
   * <p><code>string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode the UTF-8 bytes and cache the resulting String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }

  /**
   * Required. The resource name of the catalog under which the events are
   * created; see {@link #getParent()} for the format.
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      // Lazily encode the String and cache the resulting ByteString.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int FILTER_FIELD_NUMBER = 2;

  // Holds either a String or a lazily-decoded ByteString (see getFilter()).
  @SuppressWarnings("serial")
  private volatile java.lang.Object filter_ = "";

  /**
   * Required. The filter string to specify the events to be deleted with a
   * length limit of 5,000 characters. Empty string filter is not allowed. The
   * eligible fields for filtering are:
   *
   * <ul>
   *   <li>{@code eventType}: Double quoted
   *       [UserEvent.event_type][google.cloud.retail.v2.UserEvent.event_type] string.
   *   <li>{@code eventTime}: in ISO 8601 "zulu" format.
   *   <li>{@code visitorId}: Double quoted string. Specifying this will delete
   *       all events associated with a visitor.
   *   <li>{@code userId}: Double quoted string. Specifying this will delete all
   *       events associated with a user.
   * </ul>
   *
   * <p>Examples:
   *
   * <ul>
   *   <li>Deleting all events in a time range:
   *       {@code eventTime > "2012-04-23T18:25:43.511Z" eventTime < "2012-04-23T18:30:43.511Z"}
   *   <li>Deleting specific eventType in time range:
   *       {@code eventTime > "2012-04-23T18:25:43.511Z" eventType = "detail-page-view"}
   *   <li>Deleting all events for a specific visitor:
   *       {@code visitorId = "visitor1024"}
   * </ul>
   *
   * <p>The filtering fields are assumed to have an implicit AND.
   *
   * <p><code>string filter = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The filter.
   */
  @java.lang.Override
  public java.lang.String getFilter() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode the UTF-8 bytes and cache the resulting String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      filter_ = s;
      return s;
    }
  }

  /**
   * Required. The filter string specifying the events to be deleted; see
   * {@link #getFilter()} for the filter syntax.
   *
   * @return The bytes for filter.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getFilterBytes() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      // Lazily encode the String and cache the resulting ByteString.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      filter_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int FORCE_FIELD_NUMBER = 3;

  private boolean force_ = false;

  /**
   * Actually perform the purge. If {@code force} is set to false, the method
   * will return the expected purge count without deleting any user events.
   *
   * <p><code>bool force = 3;</code>
   *
   * @return The force.
   */
  @java.lang.Override
  public boolean getForce() {
    return force_;
  }

  // -1 = unknown, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Fields at their default values are omitted from the wire format.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, filter_);
    }
    if (force_ != false) {
      output.writeBool(3, force_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, filter_);
    }
    if (force_ != false) {
      size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, force_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.retail.v2.PurgeUserEventsRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.retail.v2.PurgeUserEventsRequest other =
        (com.google.cloud.retail.v2.PurgeUserEventsRequest) obj;

    if (!getParent().equals(other.getParent())) return false;
    if (!getFilter().equals(other.getFilter())) return false;
    if (getForce() != other.getForce()) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (37 * hash) + FORCE_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getForce());
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Standard parseFrom overloads; all delegate to PARSER.
  public static com.google.cloud.retail.v2.PurgeUserEventsRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.retail.v2.PurgeUserEventsRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.retail.v2.PurgeUserEventsRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.retail.v2.PurgeUserEventsRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.retail.v2.PurgeUserEventsRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.retail.v2.PurgeUserEventsRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.retail.v2.PurgeUserEventsRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.retail.v2.PurgeUserEventsRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.retail.v2.PurgeUserEventsRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.retail.v2.PurgeUserEventsRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.retail.v2.PurgeUserEventsRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.retail.v2.PurgeUserEventsRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.retail.v2.PurgeUserEventsRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Skip the mergeFrom for the default instance: the fresh builder already matches it.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * Builder for {@code google.cloud.retail.v2.PurgeUserEventsRequest}.
   *
   * <p>{@code bitField0_} tracks which fields have been explicitly set
   * (bit 0 = parent, bit 1 = filter, bit 2 = force).
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.retail.v2.PurgeUserEventsRequest)
      com.google.cloud.retail.v2.PurgeUserEventsRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.retail.v2.PurgeConfigProto
          .internal_static_google_cloud_retail_v2_PurgeUserEventsRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.retail.v2.PurgeConfigProto
          .internal_static_google_cloud_retail_v2_PurgeUserEventsRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.retail.v2.PurgeUserEventsRequest.class,
              com.google.cloud.retail.v2.PurgeUserEventsRequest.Builder.class);
    }

    // Construct using com.google.cloud.retail.v2.PurgeUserEventsRequest.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      filter_ = "";
      force_ = false;
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.retail.v2.PurgeConfigProto
          .internal_static_google_cloud_retail_v2_PurgeUserEventsRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.retail.v2.PurgeUserEventsRequest getDefaultInstanceForType() {
      return com.google.cloud.retail.v2.PurgeUserEventsRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.retail.v2.PurgeUserEventsRequest build() {
      com.google.cloud.retail.v2.PurgeUserEventsRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.retail.v2.PurgeUserEventsRequest buildPartial() {
      com.google.cloud.retail.v2.PurgeUserEventsRequest result =
          new com.google.cloud.retail.v2.PurgeUserEventsRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose presence bit is set in bitField0_.
    private void buildPartial0(com.google.cloud.retail.v2.PurgeUserEventsRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.filter_ = filter_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.force_ = force_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.retail.v2.PurgeUserEventsRequest) {
        return mergeFrom((com.google.cloud.retail.v2.PurgeUserEventsRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.retail.v2.PurgeUserEventsRequest other) {
      if (other == com.google.cloud.retail.v2.PurgeUserEventsRequest.getDefaultInstance())
        return this;
      // Only non-default values from `other` overwrite this builder's state.
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getFilter().isEmpty()) {
        filter_ = other.filter_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.getForce() != false) {
        setForce(other.getForce());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                filter_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 24:
              {
                force_ = input.readBool();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private java.lang.Object parent_ = "";

    /**
     * Required. The resource name of the catalog under which the events are
     * created. The format is
     * {@code projects/${projectId}/locations/global/catalogs/${catalogId}}.
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * See {@link #getParent()} for the field semantics.
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * See {@link #getParent()} for the field semantics.
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     * Resets parent to its default (empty) value.
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     * See {@link #getParent()} for the field semantics.
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private java.lang.Object filter_ = "";

    /**
     * Required. The filter string specifying the events to be deleted; see
     * {@link PurgeUserEventsRequest#getFilter()} for the filter syntax.
     *
     * @return The filter.
     */
    public java.lang.String getFilter() {
      java.lang.Object ref = filter_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        filter_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * See {@link #getFilter()} for the field semantics.
     *
     * @return The bytes for filter.
     */
    public com.google.protobuf.ByteString getFilterBytes() {
      java.lang.Object ref = filter_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        filter_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * See {@link #getFilter()} for the field semantics.
     *
     * @param value The filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilter(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      filter_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     * Resets filter to its default (empty) value.
     *
     * @return This builder for chaining.
     */
    public Builder clearFilter() {
      filter_ = getDefaultInstance().getFilter();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     * See {@link #getFilter()} for the field semantics.
     *
     * @param value The bytes for filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilterBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      filter_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    private boolean force_;

    /**
     * Actually perform the purge. If {@code force} is set to false, the method
     * will return the expected purge count without deleting any user events.
     *
     * @return The force.
     */
    @java.lang.Override
    public boolean getForce() {
      return force_;
    }

    /**
     * See {@link #getForce()} for the field semantics.
     *
     * @param value The force to set.
     * @return This builder for chaining.
     */
    public Builder setForce(boolean value) {
      force_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     * Resets force to its default (false) value.
     *
     * @return This builder for chaining.
     */
    public Builder clearForce() {
      bitField0_ = (bitField0_ & ~0x00000004);
      force_ = false;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.retail.v2.PurgeUserEventsRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.retail.v2.PurgeUserEventsRequest)
  private static final com.google.cloud.retail.v2.PurgeUserEventsRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.retail.v2.PurgeUserEventsRequest();
  }

  public static com.google.cloud.retail.v2.PurgeUserEventsRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<PurgeUserEventsRequest> PARSER =
      new com.google.protobuf.AbstractParser<PurgeUserEventsRequest>() {
        @java.lang.Override
        public PurgeUserEventsRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach whatever was parsed so far to the exception.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<PurgeUserEventsRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<PurgeUserEventsRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.retail.v2.PurgeUserEventsRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
apache/systemds
34,713
src/main/java/org/apache/sysds/runtime/instructions/cp/EinsumCPInstruction.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sysds.runtime.instructions.cp; import org.apache.commons.lang3.NotImplementedException; import org.apache.commons.lang3.tuple.Pair; import org.apache.commons.lang3.tuple.Triple; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.apache.sysds.common.Types.DataType; import org.apache.sysds.hops.LiteralOp; import org.apache.sysds.hops.OptimizerUtils; import org.apache.sysds.hops.codegen.SpoofCompiler; import org.apache.sysds.hops.codegen.cplan.CNode; import org.apache.sysds.hops.codegen.cplan.CNodeBinary; import org.apache.sysds.hops.codegen.cplan.CNodeCell; import org.apache.sysds.hops.codegen.cplan.CNodeData; import org.apache.sysds.hops.codegen.cplan.CNodeRow; import org.apache.sysds.runtime.codegen.*; import org.apache.sysds.runtime.compress.CompressedMatrixBlock; import org.apache.sysds.runtime.controlprogram.context.ExecutionContext; import org.apache.sysds.runtime.einsum.EinsumContext; import org.apache.sysds.runtime.functionobjects.*; import org.apache.sysds.runtime.matrix.data.LibMatrixMult; import org.apache.sysds.runtime.matrix.data.MatrixBlock; import 
org.apache.sysds.runtime.matrix.operators.AggregateOperator;
import org.apache.sysds.runtime.matrix.operators.AggregateUnaryOperator;
import org.apache.sysds.runtime.matrix.operators.BinaryOperator;
import org.apache.sysds.runtime.matrix.operators.Operator;
import org.apache.sysds.runtime.matrix.operators.ReorgOperator;
import org.apache.sysds.runtime.matrix.operators.SimpleOperator;

import java.util.*;
import java.util.function.Predicate;

/**
 * CP instruction that evaluates an einsum (Einstein summation) expression over a set
 * of input matrices. The equation string is carried as the first operand's name.
 *
 * <p>Evaluation strategy:
 * <ol>
 *   <li>Pre-contract dimensions that appear only once and extract diagonals
 *       ({@link #contractDimensionsAndComputeDiagonals}).</li>
 *   <li>Fold pure scalars out of the operand list.</li>
 *   <li>Search a pairwise-combination plan ({@link #generatePlan}) over binary ops
 *       (element-wise multiply, matrix multiply, outer product, reductions, ...).</li>
 *   <li>Execute the plan ({@link #executePlan}); any remainder that cannot be expressed
 *       as binary ops is finished by a generated cell template
 *       ({@link #computeCellSummation}).</li>
 * </ol>
 *
 * <p>NOTE(review): index characters are compared with {@code ==} on boxed
 * {@link Character} throughout this class. This is correct only because einsum indices
 * are ASCII letters, which fall inside the {@code Character.valueOf} cache — confirm
 * before extending the index alphabet beyond code point 127.
 */
public class EinsumCPInstruction extends BuiltinNaryCPInstruction {
	/** When true, skip plan generation and evaluate everything via the cell template. */
	public static boolean FORCE_CELL_TPL = false;
	protected static final Log LOG = LogFactory.getLog(EinsumCPInstruction.class.getName());
	/** The einsum equation string, e.g. "ij,jk->ik". */
	public String eqStr;
	private final int _numThreads;
	private final CPOperand[] _in;

	public EinsumCPInstruction(Operator op, String opcode, String istr, CPOperand out, CPOperand... inputs) {
		super(op, opcode, istr, out, inputs);
		_numThreads = OptimizerUtils.getConstrainedNumThreads(-1);
		_in = inputs;
		// the equation string is transported as the name of the first operand
		this.eqStr = inputs[0].getName();
		// NOTE(review): forcing TRACE level here looks like debugging residue — it
		// overrides the user's log configuration for this class; confirm intent.
		Logger.getLogger(EinsumCPInstruction.class).setLevel(Level.TRACE);
	}

	@SuppressWarnings("unused")
	private EinsumContext einc = null; // kept for debugging/inspection after execution

	@Override
	public void processInstruction(ExecutionContext ec) {
		// get input matrices and scalars, incl. pinning of matrices
		ArrayList<MatrixBlock> inputs = new ArrayList<>();
		for (CPOperand input : _in) {
			if(input.getDataType()==DataType.MATRIX){
				MatrixBlock mb = ec.getMatrixInput(input.getName());
				if(mb instanceof CompressedMatrixBlock){
					mb = ((CompressedMatrixBlock) mb).getUncompressed("Spoof instruction");
				}
				inputs.add(mb);
			}
		}

		EinsumContext einc = EinsumContext.getEinsumContext(eqStr, inputs);
		this.einc = einc;
		// output index string: "", "a" or "ab"
		String resultString = einc.outChar2 != null ? String.valueOf(einc.outChar1) + einc.outChar2
			: einc.outChar1 != null ? String.valueOf(einc.outChar1) : "";
		// fixed: guard matched the wrong level (isDebugEnabled around a trace call)
		if( LOG.isTraceEnabled() )
			LOG.trace("outrows:"+einc.outRows+", outcols:"+einc.outCols);
		ArrayList<String> inputsChars = einc.newEquationStringInputsSplit;
		if(LOG.isTraceEnabled())
			LOG.trace(String.join(",",einc.newEquationStringInputsSplit));

		contractDimensionsAndComputeDiagonals(einc, inputs);

		// make all vectors column vectors
		for(int i = 0; i < inputs.size(); i++){
			if(inputs.get(i) != null && inputsChars.get(i).length() == 1)
				EnsureMatrixBlockColumnVector(inputs.get(i));
		}
		if(LOG.isTraceEnabled())
			for(Character c : einc.characterAppearanceIndexes.keySet()){
				ArrayList<Integer> a = einc.characterAppearanceIndexes.get(c);
				LOG.trace(c+" count= "+a.size());
			}

		// fold all fully-contracted (scalar) operands into a single scalar factor
		Double scalar = null;
		for(int i=0;i< inputs.size(); i++){
			String s = inputsChars.get(i);
			if(s.equals("")){
				MatrixBlock mb = inputs.get(i);
				if (scalar == null) scalar = mb.get(0,0);
				else scalar*= mb.get(0,0);
				inputs.set(i,null); // mark slot as consumed
				inputsChars.set(i,null);
			}
		}
		if (scalar != null) {
			// re-append the combined scalar as a single 1x1 operand
			inputsChars.add("");
			inputs.add(new MatrixBlock(scalar));
		}

		// occurrence count per index character (chars absent from the appearance map
		// occur exactly once)
		HashMap<Character, Integer> characterToOccurences = new HashMap<>();
		for (Character key :einc.characterAppearanceIndexes.keySet()) {
			characterToOccurences.put(key, einc.characterAppearanceIndexes.get(key).size());
		}
		for (Character key :einc.charToDimensionSize.keySet()) {
			if(!characterToOccurences.containsKey(key))
				characterToOccurences.put(key, 1);
		}

		// wrap remaining operands as plan leaves
		ArrayList<EOpNode> eOpNodes = new ArrayList<>(inputsChars.size());
		for (int i = 0; i < inputsChars.size(); i++) {
			if (inputsChars.get(i) == null)
				continue;
			EOpNodeData n = new EOpNodeData(
				inputsChars.get(i).length() > 0 ? inputsChars.get(i).charAt(0) : null,
				inputsChars.get(i).length() > 1 ? inputsChars.get(i).charAt(1) : null, i);
			eOpNodes.add(n);
		}

		// fixed: the forced-cell-template path previously set plan=null and then
		// dereferenced plan.getRight() below (guaranteed NPE); a trivial plan of the
		// data leaves feeds the cell template instead. Default path is unchanged.
		Pair<Integer, List<EOpNode> > plan = FORCE_CELL_TPL
			? Pair.<Integer, List<EOpNode>>of(0, eOpNodes)
			: generatePlan(0, eOpNodes, einc.charToDimensionSize, characterToOccurences, einc.outChar1, einc.outChar2);
		ArrayList<MatrixBlock> resMatrices = executePlan(plan.getRight(), inputs);

		if(!FORCE_CELL_TPL && resMatrices.size() == 1){
			// plan fully combined the operands: emit result, transposing if the plan
			// produced the output indices in swapped order
			EOpNode resNode = plan.getRight().get(0);
			if (einc.outChar1 != null && einc.outChar2 != null){
				if(resNode.c1 == einc.outChar1 && resNode.c2 == einc.outChar2){
					ec.setMatrixOutput(output.getName(), resMatrices.get(0));
				} else if(resNode.c1 == einc.outChar2 && resNode.c2 == einc.outChar1){
					ReorgOperator transpose = new ReorgOperator(SwapIndex.getSwapIndexFnObject(), _numThreads);
					MatrixBlock resM = resMatrices.get(0).reorgOperations(transpose, new MatrixBlock(),0,0,0);
					ec.setMatrixOutput(output.getName(), resM);
				}else{
					if(LOG.isTraceEnabled())
						LOG.trace("Einsum expected: "+resultString + ", got: "+resNode.c1+resNode.c2);
					throw new RuntimeException("Einsum plan produced different result");
				}
			}else if (einc.outChar1 != null){
				if(resNode.c1 == einc.outChar1 && resNode.c2 == null){
					ec.setMatrixOutput(output.getName(), resMatrices.get(0));
				}else{
					if(LOG.isTraceEnabled())
						LOG.trace("Einsum expected: "+resultString + ", got: "+resNode.c1+resNode.c2);
					throw new RuntimeException("Einsum plan produced different result");
				}
			}else{
				// NOTE(review): if resNode is unexpectedly non-scalar here, no output is
				// set and the error passes silently — confirm whether this should throw.
				if(resNode.c1 == null && resNode.c2 == null){
					ec.setScalarOutput(output.getName(), new DoubleObject(resMatrices.get(0).get(0, 0)));
				}
			}
		}else{
			// use cell template with loops for the remaining partial results
			ArrayList<MatrixBlock> mbs = resMatrices;
			ArrayList<String> chars = new ArrayList<>();
			for (int i = 0; i < plan.getRight().size(); i++) {
				String s;
				if(plan.getRight().get(i).c1 == null) s = "";
				else if(plan.getRight().get(i).c2 == null) s = plan.getRight().get(i).c1.toString();
				else s = plan.getRight().get(i).c1.toString() + plan.getRight().get(i).c2;
				chars.add(s);
			}
			// every index that is not an output index gets summed out
			ArrayList<Character> summingChars = new ArrayList<>();
			for (Character c : einc.characterAppearanceIndexes.keySet()) {
				if (c != einc.outChar1 && c != einc.outChar2)
					summingChars.add(c);
			}
			if(LOG.isTraceEnabled())
				LOG.trace("finishing with cell tpl: "+String.join(",", chars));
			MatrixBlock res = computeCellSummation(mbs, chars, resultString,
				einc.charToDimensionSize, summingChars, einc.outRows, einc.outCols);
			if (einc.outRows == 1 && einc.outCols == 1)
				ec.setScalarOutput(output.getName(), new DoubleObject(res.get(0, 0)));
			else
				ec.setMatrixOutput(output.getName(), res);
		}
		if(LOG.isTraceEnabled())
			LOG.trace("EinsumCPInstruction Finished");
		releaseMatrixInputs(ec);
	}

	/**
	 * In-place preprocessing of the operand list: extracts diagonals for inputs marked
	 * diagonal, and sums out dimensions marked for contraction (both, rows, or columns).
	 */
	private void contractDimensionsAndComputeDiagonals(EinsumContext einc, ArrayList<MatrixBlock> inputs) {
		for(int i = 0; i< einc.contractDims.length; i++){
			AggregateOperator agg = new AggregateOperator(0, Plus.getPlusFnObject());
			if(einc.diagonalInputs[i]){
				ReorgOperator op = new ReorgOperator(DiagIndex.getDiagIndexFnObject());
				inputs.set(i, inputs.get(i).reorgOperations(op, new MatrixBlock(),0,0,0));
			}
			if (einc.contractDims[i] == null)
				continue;
			switch (einc.contractDims[i]){
				case CONTRACT_BOTH: { // sum over all cells -> 1x1
					AggregateUnaryOperator aggun = new AggregateUnaryOperator(agg, ReduceAll.getReduceAllFnObject(), _numThreads);
					MatrixBlock res = new MatrixBlock(1, 1, false);
					inputs.get(i).aggregateUnaryOperations(aggun, res, 0, null);
					inputs.set(i, res);
					break;
				}
				case CONTRACT_RIGHT: { // row sums -> column vector
					AggregateUnaryOperator aggun = new AggregateUnaryOperator(agg, ReduceCol.getReduceColFnObject(), _numThreads);
					MatrixBlock res = new MatrixBlock(inputs.get(i).getNumRows(), 1, false);
					inputs.get(i).aggregateUnaryOperations(aggun, res, 0, null);
					inputs.set(i, res);
					break;
				}
				case CONTRACT_LEFT: { // column sums -> vector with one entry per column
					AggregateUnaryOperator aggun = new AggregateUnaryOperator(agg, ReduceRow.getReduceRowFnObject(), _numThreads);
					MatrixBlock res = new MatrixBlock(inputs.get(i).getNumColumns(), 1, false);
					inputs.get(i).aggregateUnaryOperations(aggun, res, 0, null);
					inputs.set(i, res);
					break;
				}
				default: break;
			}
		}
	}

	/**
	 * Binary combination patterns over two operands' index characters.
	 * Upper case: index survives into the result; lower case: index is summed out.
	 */
	private enum EBinaryOperand {
		////// summations: //////
		aB_a,	// -> B
		Ba_a,	// -> B
		Ba_aC,	// mmult -> BC
		aB_Ca,
		Ba_Ca,	// -> BC
		aB_aC,	// outer mult, possibly with transposing first -> BC
		a_a,	// dot ->
		////// elementwise mult and sums, e.g. ij,ij->i //////
		aB_aB,	// elemwise and colsum -> B
		Ba_Ba,	// elemwise and rowsum -> B
		Ba_aB,	// elemwise, either colsum or rowsum -> B
		////// elementwise, no summations: //////
		A_A,	// v-elemwise -> A
		AB_AB,	// M-M elemwise -> AB
		AB_BA,	// M-M.T elemwise -> AB
		AB_A,	// M-v colwise -> BA!?
		BA_A,	// M-v rowwise -> BA
		ab_ab,	// M-M sum all
		ab_ba,	// M-M.T sum all
		////// other //////
		A_B,	// outer mult -> AB
		A_scalar,	// v-scalar
		AB_scalar,	// m-scalar
		scalar_scalar,
	}

	/** Plan node: result carries up to two surviving index characters (c2 nullable). */
	private abstract class EOpNode {
		public Character c1;
		public Character c2; // nullable
		public EOpNode(Character c1, Character c2){
			this.c1 = c1;
			this.c2 = c2;
		}
	}

	/** Inner plan node: combines two children with one {@link EBinaryOperand}. */
	private class EOpNodeBinary extends EOpNode {
		public EOpNode left;
		public EOpNode right;
		public EBinaryOperand operand;
		public EOpNodeBinary(Character c1, Character c2, EOpNode left, EOpNode right, EBinaryOperand operand){
			super(c1,c2);
			this.left = left;
			this.right = right;
			this.operand = operand;
		}
	}

	/** Leaf plan node: references an input matrix by position. */
	private class EOpNodeData extends EOpNode {
		public int matrixIdx;
		public EOpNodeData(Character c1, Character c2, int matrixIdx){
			super(c1,c2);
			this.matrixIdx = matrixIdx;
		}
	}

	/**
	 * Recursive exhaustive search for a cheap pairwise-combination plan.
	 * Tries every combinable operand pair, recursing with the pair replaced by its
	 * combination; occurrence counts are mutated before recursion and restored after
	 * (backtracking). Returns accumulated cost plus the remaining operand list —
	 * ideally a single node; more if no further pair is combinable.
	 */
	private Pair<Integer, List<EOpNode> /* ideally with one element */> generatePlan(int cost, ArrayList<EOpNode> operands,
			HashMap<Character, Integer> charToSizeMap, HashMap<Character, Integer> charToOccurences,
			Character outChar1, Character outChar2) {
		Integer minCost = cost;
		List<EOpNode> minNodes = operands;
		if (operands.size() == 2){
			// normalize order: matrix before vector, vector before scalar
			boolean swap = (operands.get(0).c2 == null && operands.get(1).c2 != null) || operands.get(0).c1 == null;
			EOpNode n1 = operands.get(!swap ? 0 : 1);
			EOpNode n2 = operands.get(!swap ? 1 : 0);
			Triple<Integer, EBinaryOperand, Pair<Character, Character>> t =
				TryCombineAndCost(n1, n2, charToSizeMap, charToOccurences, outChar1, outChar2);
			if (t != null) {
				EOpNodeBinary newNode = new EOpNodeBinary(t.getRight().getLeft(), t.getRight().getRight(), n1, n2, t.getMiddle());
				int thisCost = cost + t.getLeft();
				return Pair.of(thisCost, Arrays.asList(newNode));
			}
			return Pair.of(cost, operands);
		}
		else if (operands.size() == 1){
			// check for transpose
			return Pair.of(cost, operands);
		}
		for(int i = 0; i < operands.size()-1; i++){
			for (int j = i+1; j < operands.size(); j++){
				boolean swap = (operands.get(i).c2 == null && operands.get(j).c2 != null) || operands.get(i).c1 == null;
				EOpNode n1 = operands.get(!swap ? i : j);
				EOpNode n2 = operands.get(!swap ? j : i);
				Triple<Integer, EBinaryOperand, Pair<Character, Character>> t =
					TryCombineAndCost(n1, n2, charToSizeMap, charToOccurences, outChar1, outChar2);
				if (t != null){
					EOpNodeBinary newNode = new EOpNodeBinary(t.getRight().getLeft(), t.getRight().getRight(), n1, n2, t.getMiddle());
					int thisCost = cost + t.getLeft();
					// consume the pair's index occurrences, add back the result's
					if(n1.c1 != null) charToOccurences.put(n1.c1, charToOccurences.get(n1.c1)-1);
					if(n1.c2 != null) charToOccurences.put(n1.c2, charToOccurences.get(n1.c2)-1);
					if(n2.c1 != null) charToOccurences.put(n2.c1, charToOccurences.get(n2.c1)-1);
					if(n2.c2 != null) charToOccurences.put(n2.c2, charToOccurences.get(n2.c2)-1);
					if(newNode.c1 != null) charToOccurences.put(newNode.c1, charToOccurences.get(newNode.c1)+1);
					if(newNode.c2 != null) charToOccurences.put(newNode.c2, charToOccurences.get(newNode.c2)+1);

					ArrayList<EOpNode> newOperands = new ArrayList<>(operands.size()-1);
					for(int z = 0; z < operands.size(); z++){
						if(z != i && z != j)
							newOperands.add(operands.get(z));
					}
					newOperands.add(newNode);
					Pair<Integer, List<EOpNode>> furtherPlan =
						generatePlan(thisCost, newOperands, charToSizeMap, charToOccurences, outChar1, outChar2);
					// prefer plans that combine further; tie-break on cost
					if(furtherPlan.getRight().size() < (minNodes.size()) || furtherPlan.getLeft() < minCost){
						minCost = furtherPlan.getLeft();
						minNodes = furtherPlan.getRight();
					}

					// backtrack: restore occurrence counts
					if(n1.c1 != null) charToOccurences.put(n1.c1, charToOccurences.get(n1.c1)+1);
					if(n1.c2 != null) charToOccurences.put(n1.c2, charToOccurences.get(n1.c2)+1);
					if(n2.c1 != null) charToOccurences.put(n2.c1, charToOccurences.get(n2.c1)+1);
					if(n2.c2 != null) charToOccurences.put(n2.c2, charToOccurences.get(n2.c2)+1);
					if(newNode.c1 != null) charToOccurences.put(newNode.c1, charToOccurences.get(newNode.c1)-1);
					if(newNode.c2 != null) charToOccurences.put(newNode.c2, charToOccurences.get(newNode.c2)-1);
				}
			}
		}
		return Pair.of(minCost, minNodes);
	}

	/**
	 * Classifies how two operands can be combined and estimates the cost.
	 *
	 * @return (cost, pattern, resulting index chars), or null if the pair cannot be
	 *         combined by a single binary operation (e.g. AB,AC or ab,cd).
	 *         An index may only be summed out if it is not an output index and occurs
	 *         nowhere else (occurrence count <= 2).
	 */
	private static Triple<Integer, EBinaryOperand, Pair<Character, Character>> TryCombineAndCost(EOpNode n1, EOpNode n2,
			HashMap<Character, Integer> charToSizeMap, HashMap<Character, Integer> charToOccurences,
			Character outChar1, Character outChar2){
		Predicate<Character> cannotBeSummed = (c) -> c == outChar1 || c == outChar2 || charToOccurences.get(c) > 2;
		if(n1.c1 == null) {
			// n2.c1 also has to be null (scalars were ordered last by the caller)
			return Triple.of(1, EBinaryOperand.scalar_scalar, Pair.of(null, null));
		}
		if(n2.c1 == null) {
			if(n1.c2 == null)
				return Triple.of(charToSizeMap.get(n1.c1), EBinaryOperand.A_scalar, Pair.of(n1.c1, null));
			return Triple.of(charToSizeMap.get(n1.c1)*charToSizeMap.get(n1.c2), EBinaryOperand.AB_scalar, Pair.of(n1.c1, n1.c2));
		}
		if(n1.c1 == n2.c1){
			if(n1.c2 != null){
				if ( n1.c2 == n2.c2){ // ab,ab — elementwise, then sum out what we may
					if( cannotBeSummed.test(n1.c1)){
						if(cannotBeSummed.test(n1.c2)){
							return Triple.of(charToSizeMap.get(n1.c1)*charToSizeMap.get(n1.c2), EBinaryOperand.AB_AB, Pair.of(n1.c1, n1.c2));
						}
						return Triple.of(charToSizeMap.get(n1.c1)*charToSizeMap.get(n1.c2), EBinaryOperand.Ba_Ba, Pair.of(n1.c1, null));
					}
					if(cannotBeSummed.test(n1.c2)){
						return Triple.of(charToSizeMap.get(n1.c1)*charToSizeMap.get(n1.c2), EBinaryOperand.aB_aB, Pair.of(n1.c2, null));
					}
					return Triple.of(charToSizeMap.get(n1.c1)*charToSizeMap.get(n1.c2), EBinaryOperand.ab_ab, Pair.of(null, null));
				}
				else if(n2.c2 == null){ // ab,a — broadcast multiply along shared index
					if(cannotBeSummed.test(n1.c1)){
						return Triple.of(charToSizeMap.get(n1.c1)*charToSizeMap.get(n1.c2)*2, EBinaryOperand.AB_A, Pair.of(n1.c1, n1.c2));
					}
					return Triple.of(charToSizeMap.get(n1.c1)*charToSizeMap.get(n1.c2)*2, EBinaryOperand.aB_a, Pair.of(n1.c2, null)); // in theory (null, n1.c2)
				}
				else if(n1.c1 ==outChar1 || n1.c1==outChar2|| charToOccurences.get(n1.c1) > 2){
					return null;// AB,AC
				}
				else {
					return Triple.of((charToSizeMap.get(n1.c1)*charToSizeMap.get(n1.c2))
						+(charToSizeMap.get(n1.c1)*charToSizeMap.get(n1.c2)*charToSizeMap.get(n2.c2)),
						EBinaryOperand.aB_aC, Pair.of(n1.c2, n2.c2)); // or n2.c2, n1.c2
				}
			}else{ // n1.c2 == null -> n2.c2 == null: a,a — elementwise or dot product
				if(n1.c1 ==outChar1 || n1.c1==outChar2 || charToOccurences.get(n1.c1) > 2){
					return Triple.of(charToSizeMap.get(n1.c1), EBinaryOperand.A_A, Pair.of(n1.c1, null));
				}
				return Triple.of(charToSizeMap.get(n1.c1), EBinaryOperand.a_a, Pair.of(null, null));
			}
		}else{ // n1.c1 != n2.c1
			if(n1.c2 == null) { // a,b — outer product
				return Triple.of(charToSizeMap.get(n1.c1)*charToSizeMap.get(n2.c1), EBinaryOperand.A_B, Pair.of(n1.c1, n2.c1));
			}
			else if(n2.c2 == null) { // ab,c
				if (n1.c2 == n2.c1) {
					if(cannotBeSummed.test(n1.c2)){
						return Triple.of(charToSizeMap.get(n1.c1)*charToSizeMap.get(n2.c1), EBinaryOperand.BA_A, Pair.of(n1.c1, n1.c2));
					}
					return Triple.of(charToSizeMap.get(n1.c1)*charToSizeMap.get(n2.c1), EBinaryOperand.Ba_a, Pair.of(n1.c1, null));
				}
				return null; // AB,C
			}
			else if (n1.c2 == n2.c1) {
				if(n1.c1 == n2.c2){ // ab,ba
					if(cannotBeSummed.test(n1.c1)){
						if(cannotBeSummed.test(n1.c2)){
							return Triple.of(charToSizeMap.get(n1.c1)*charToSizeMap.get(n1.c2), EBinaryOperand.AB_BA, Pair.of(n1.c1, n1.c2));
						}
						return Triple.of(charToSizeMap.get(n1.c1)*charToSizeMap.get(n1.c2), EBinaryOperand.Ba_aB, Pair.of(n1.c1, null));
					}
					return Triple.of(charToSizeMap.get(n1.c1)*charToSizeMap.get(n1.c2), EBinaryOperand.ab_ba, Pair.of(null, null));
				}
				if(cannotBeSummed.test(n1.c2)){
					return null; // AB_B
				}else{ // ab,bc — plain matrix multiply
					return Triple.of(charToSizeMap.get(n1.c1)*charToSizeMap.get(n1.c2)*charToSizeMap.get(n2.c2),
						EBinaryOperand.Ba_aC, Pair.of(n1.c1, n2.c2));
				}
			}
			if(n1.c1 == n2.c2) {
				if(cannotBeSummed.test(n1.c1)){
					return null; // AB_B
				}
				return Triple.of(charToSizeMap.get(n1.c1)*charToSizeMap.get(n1.c2)*charToSizeMap.get(n2.c1),
					EBinaryOperand.aB_Ca, Pair.of(n2.c1, n1.c2)); // just a reorder of mmult
			}
			else if (n1.c2 == n2.c2) {
				if(n1.c2 ==outChar1 || n1.c2==outChar2|| charToOccurences.get(n1.c2) > 2){
					return null; // BA_CA
				}else{
					return Triple.of(charToSizeMap.get(n1.c1)*charToSizeMap.get(n1.c2)
						+(charToSizeMap.get(n1.c1)*charToSizeMap.get(n1.c2)*charToSizeMap.get(n2.c1)),
						EBinaryOperand.Ba_Ca, Pair.of(n1.c1, n2.c1)); // or n2.c1, n1.c1
				}
			}
			else {
				// we have something like ab,cd
				return null;
			}
		}
	}

	private ArrayList<MatrixBlock /* #els = #els of plan */> executePlan(List<EOpNode> plan, ArrayList<MatrixBlock> inputs){
		return executePlan(plan, inputs, false);
	}

	/** Evaluates every root of the plan; codegen path is experimental. */
	private ArrayList<MatrixBlock /* #els = #els of plan */> executePlan(List<EOpNode> plan, ArrayList<MatrixBlock> inputs, boolean codegen) {
		ArrayList<MatrixBlock> res = new ArrayList<>(plan.size());
		for(EOpNode p : plan){
			if(codegen)
				res.add(ComputeEOpNodeCodegen(p, inputs));
			else
				res.add(ComputeEOpNode(p, inputs));
		}
		return res;
	}

	/** Recursively evaluates a plan node by dispatching on its combination pattern. */
	private MatrixBlock ComputeEOpNode(EOpNode eOpNode, ArrayList<MatrixBlock> inputs){
		if(eOpNode instanceof EOpNodeData eOpNodeData){
			return inputs.get(eOpNodeData.matrixIdx);
		}
		EOpNodeBinary bin = (EOpNodeBinary) eOpNode;
		MatrixBlock left = ComputeEOpNode(bin.left, inputs);
		MatrixBlock right = ComputeEOpNode(bin.right, inputs);
		AggregateOperator agg = new AggregateOperator(0, Plus.getPlusFnObject());
		MatrixBlock res;
		switch (bin.operand){
			case AB_AB -> { // elementwise multiply
				res = MatrixBlock.naryOperations(new SimpleOperator(Multiply.getMultiplyFnObject()),
					new MatrixBlock[]{left, right},new ScalarObject[]{}, new MatrixBlock());
			}
			case A_A -> { // vector elementwise multiply
				EnsureMatrixBlockColumnVector(left);
				EnsureMatrixBlockColumnVector(right);
				res = MatrixBlock.naryOperations(new SimpleOperator(Multiply.getMultiplyFnObject()),
					new MatrixBlock[]{left, right},new ScalarObject[]{}, new MatrixBlock());
			}
			case a_a -> { // dot product: elementwise multiply, then sum all
				EnsureMatrixBlockColumnVector(left);
				EnsureMatrixBlockColumnVector(right);
				res = MatrixBlock.naryOperations(new SimpleOperator(Multiply.getMultiplyFnObject()),
					new MatrixBlock[]{left, right},new ScalarObject[]{}, new MatrixBlock());
				AggregateUnaryOperator aggun = new AggregateUnaryOperator(agg, ReduceAll.getReduceAllFnObject(), _numThreads);
				res = (MatrixBlock) res.aggregateUnaryOperations(aggun, new MatrixBlock(), 0, null);
			}
			case Ba_Ba -> { // elementwise multiply, then row sums
				res = MatrixBlock.naryOperations(new SimpleOperator(Multiply.getMultiplyFnObject()),
					new MatrixBlock[]{left, right},new ScalarObject[]{}, new MatrixBlock());
				AggregateUnaryOperator aggun = new AggregateUnaryOperator(agg, ReduceCol.getReduceColFnObject(), _numThreads);
				res = (MatrixBlock) res.aggregateUnaryOperations(aggun, new MatrixBlock(), 0, null);
			}
			case aB_aB -> { // elementwise multiply, then column sums -> column vector
				res = MatrixBlock.naryOperations(new SimpleOperator(Multiply.getMultiplyFnObject()),
					new MatrixBlock[]{left, right},new ScalarObject[]{}, new MatrixBlock());
				AggregateUnaryOperator aggun = new AggregateUnaryOperator(agg, ReduceRow.getReduceRowFnObject(), _numThreads);
				res = (MatrixBlock) res.aggregateUnaryOperations(aggun, new MatrixBlock(), 0, null);
				EnsureMatrixBlockColumnVector(res);
			}
			case ab_ab -> { // elementwise multiply, then sum all
				res = MatrixBlock.naryOperations(new SimpleOperator(Multiply.getMultiplyFnObject()),
					new MatrixBlock[]{left, right},new ScalarObject[]{}, new MatrixBlock());
				AggregateUnaryOperator aggun = new AggregateUnaryOperator(agg, ReduceAll.getReduceAllFnObject(), _numThreads);
				res = (MatrixBlock) res.aggregateUnaryOperations(aggun, new MatrixBlock(), 0, null);
			}
			case ab_ba -> { // transpose right, elementwise multiply, sum all
				ReorgOperator transpose = new ReorgOperator(SwapIndex.getSwapIndexFnObject(), _numThreads);
				right = right.reorgOperations(transpose, new MatrixBlock(), 0, 0, 0);
				res = MatrixBlock.naryOperations(new SimpleOperator(Multiply.getMultiplyFnObject()),
					new MatrixBlock[]{left, right},new ScalarObject[]{}, new MatrixBlock());
				AggregateUnaryOperator aggun = new AggregateUnaryOperator(agg, ReduceAll.getReduceAllFnObject(), _numThreads);
				res = (MatrixBlock) res.aggregateUnaryOperations(aggun, new MatrixBlock(), 0, null);
			}
			case Ba_aB -> { // transpose right, elementwise multiply, row sums
				ReorgOperator transpose = new ReorgOperator(SwapIndex.getSwapIndexFnObject(), _numThreads);
				right = right.reorgOperations(transpose, new MatrixBlock(), 0, 0, 0);
				res = MatrixBlock.naryOperations(new SimpleOperator(Multiply.getMultiplyFnObject()),
					new MatrixBlock[]{left, right},new ScalarObject[]{}, new MatrixBlock());
				AggregateUnaryOperator aggun = new AggregateUnaryOperator(agg, ReduceCol.getReduceColFnObject(), _numThreads);
				res = (MatrixBlock) res.aggregateUnaryOperations(aggun, new MatrixBlock(), 0, null);
			}
			case AB_BA -> { // transpose right, elementwise multiply
				ReorgOperator transpose = new ReorgOperator(SwapIndex.getSwapIndexFnObject(), _numThreads);
				right = right.reorgOperations(transpose, new MatrixBlock(), 0, 0, 0);
				res = MatrixBlock.naryOperations(new SimpleOperator(Multiply.getMultiplyFnObject()),
					new MatrixBlock[]{left, right},new ScalarObject[]{}, new MatrixBlock());
			}
			case Ba_aC -> { // plain matrix multiply
				res = LibMatrixMult.matrixMult(left,right, new MatrixBlock(), _numThreads);
			}
			case aB_Ca -> { // matrix multiply with operands reordered
				res = LibMatrixMult.matrixMult(right,left, new MatrixBlock(), _numThreads);
			}
			case Ba_Ca -> { // multiply by transposed right operand
				ReorgOperator transpose = new ReorgOperator(SwapIndex.getSwapIndexFnObject(), _numThreads);
				right = right.reorgOperations(transpose, new MatrixBlock(), 0, 0, 0);
				res = LibMatrixMult.matrixMult(left,right, new MatrixBlock(), _numThreads);
			}
			case aB_aC -> { // multiply with transposed left operand
				ReorgOperator transpose = new ReorgOperator(SwapIndex.getSwapIndexFnObject(), _numThreads);
				left = left.reorgOperations(transpose, new MatrixBlock(), 0, 0, 0);
				res = LibMatrixMult.matrixMult(left,right, new MatrixBlock(), _numThreads);
			}
			case A_scalar, AB_scalar -> { // scale by 1x1 right operand
				res = MatrixBlock.naryOperations(new SimpleOperator(Multiply.getMultiplyFnObject()),
					new MatrixBlock[]{left},new ScalarObject[]{new DoubleObject(right.get(0,0))}, new MatrixBlock());
			}
			case BA_A -> { // broadcast multiply row vector across rows
				EnsureMatrixBlockRowVector(right);
				res = left.binaryOperations(new BinaryOperator(Multiply.getMultiplyFnObject()), right);
			}
			case Ba_a -> { // broadcast multiply, then row sums
				EnsureMatrixBlockRowVector(right);
				res = left.binaryOperations(new BinaryOperator(Multiply.getMultiplyFnObject()), right);
				AggregateUnaryOperator aggun = new AggregateUnaryOperator(agg, ReduceCol.getReduceColFnObject(), _numThreads);
				res = (MatrixBlock) res.aggregateUnaryOperations(aggun, new MatrixBlock(), 0, null);
			}
			case AB_A -> { // broadcast multiply column vector across columns
				EnsureMatrixBlockColumnVector(right);
				res = left.binaryOperations(new BinaryOperator(Multiply.getMultiplyFnObject()), right);
			}
			case aB_a -> { // broadcast multiply, then column sums -> column vector
				EnsureMatrixBlockColumnVector(right);
				res = left.binaryOperations(new BinaryOperator(Multiply.getMultiplyFnObject()), right);
				AggregateUnaryOperator aggun = new AggregateUnaryOperator(agg, ReduceRow.getReduceRowFnObject(), _numThreads);
				res = (MatrixBlock) res.aggregateUnaryOperations(aggun, new MatrixBlock(), 0, null);
				EnsureMatrixBlockColumnVector(res);
			}
			case A_B -> { // outer product of two vectors
				EnsureMatrixBlockColumnVector(left);
				EnsureMatrixBlockRowVector(right);
				res = left.binaryOperations(new BinaryOperator(Multiply.getMultiplyFnObject()), right);
			}
			case scalar_scalar -> {
				return new MatrixBlock(left.get(0,0)*right.get(0,0));
			}
			default -> {
				throw new IllegalArgumentException("Unexpected value: " + bin.operand.toString());
			}
		}
		return res;
	}

	private static MatrixBlock ComputeEOpNodeCodegen(EOpNode eOpNode, ArrayList<MatrixBlock> inputs){
		return rComputeEOpNodeCodegen(eOpNode, inputs);
	}

	private static CNodeData MatrixBlockToCNodeData(MatrixBlock mb, int id){
		return new CNodeData("ce"+id, id, mb.getNumRows(), mb.getNumColumns(), DataType.MATRIX);
	}

	/**
	 * Experimental codegen evaluation; currently only supports the fused pattern of
	 * three elementwise multiplies (AB_AB over an AB_AB right child) via a generated
	 * row-wise Spoof operator. Everything else throws {@link NotImplementedException}.
	 */
	private static MatrixBlock rComputeEOpNodeCodegen(EOpNode eOpNode, ArrayList<MatrixBlock> inputs) {
		if (eOpNode instanceof EOpNodeData eOpNodeData){
			return inputs.get(eOpNodeData.matrixIdx);
		}
		EOpNodeBinary bin = (EOpNodeBinary) eOpNode;
		if(bin.operand == EBinaryOperand.AB_AB){
			if (bin.right instanceof EOpNodeBinary rBinary && rBinary.operand == EBinaryOperand.AB_AB){
				MatrixBlock left = rComputeEOpNodeCodegen(bin.left, inputs);
				MatrixBlock right1 = rComputeEOpNodeCodegen(((EOpNodeBinary) bin.right).left, inputs);
				MatrixBlock right2 = rComputeEOpNodeCodegen(((EOpNodeBinary) bin.right).right, inputs);
				CNodeData d0 = MatrixBlockToCNodeData(left, 0);
				CNodeData d1 = MatrixBlockToCNodeData(right1, 1);
				CNodeData d2 = MatrixBlockToCNodeData(right2, 2);
				// fuse d0 * (d1 * d2) into a single rowwise operator
				CNodeBinary rightBinary = new CNodeBinary(d1, d2, CNodeBinary.BinType.VECT_MULT);
				CNodeBinary cNodeBinary = new CNodeBinary(d0, rightBinary, CNodeBinary.BinType.VECT_MULT);
				ArrayList<CNode> cnodeIn = new ArrayList<>();
				cnodeIn.add(d0);
				cnodeIn.add(d1);
				cnodeIn.add(d2);
				CNodeRow cnode = new CNodeRow(cnodeIn, cNodeBinary);
				cnode.setRowType(SpoofRowwise.RowType.NO_AGG);
				cnode.renameInputs();
				String src = cnode.codegen(false, SpoofCompiler.GeneratorAPI.JAVA);
				if( LOG.isTraceEnabled())
					LOG.trace(CodegenUtils.printWithLineNumber(src));
				Class<?> cla = CodegenUtils.compileClass("codegen." + cnode.getClassname(), src);
				SpoofOperator op = CodegenUtils.createInstance(cla);
				MatrixBlock mb = new MatrixBlock();
				ArrayList<ScalarObject> scalars = new ArrayList<>();
				ArrayList<MatrixBlock> mbs = new ArrayList<>(3);
				mbs.add(left);
				mbs.add(right1);
				mbs.add(right2);
				MatrixBlock out = op.execute(mbs, scalars, mb, 6);
				return out;
			}
		}
		throw new NotImplementedException();
	}

	private void releaseMatrixInputs(ExecutionContext ec){
		for (CPOperand input : _in)
			if(input.getDataType()==DataType.MATRIX)
				ec.releaseMatrixInput(input.getName());
		//todo release other
	}

	/** Reinterprets a dense row vector as a column vector in place (no data copy). */
	private static void EnsureMatrixBlockColumnVector(MatrixBlock mb){
		if(mb.getNumColumns() > 1){
			mb.setNumRows(mb.getNumColumns());
			mb.setNumColumns(1);
			mb.getDenseBlock().resetNoFill(mb.getNumRows(),1);
		}
	}

	/** Reinterprets a dense column vector as a row vector in place (no data copy). */
	private static void EnsureMatrixBlockRowVector(MatrixBlock mb){
		if(mb.getNumRows() > 1){
			mb.setNumColumns(mb.getNumRows());
			mb.setNumRows(1);
			mb.getDenseBlock().resetNoFill(1,mb.getNumColumns());
		}
	}

	private static void indent(StringBuilder sb, int level) {
		for (int i = 0; i < level; i++) {
			sb.append("	");
		}
	}

	/**
	 * Finishes the remaining partial results with a generated cell template: for each
	 * output cell (rix, cix), nested loops over all summing indices accumulate the
	 * product of the corresponding entries of every remaining operand. Operands are
	 * addressed via getValue(b[i], ...) with 'rix'/'cix' for output indices, loop
	 * variables for summed indices, and 0 for scalars.
	 */
	private MatrixBlock computeCellSummation(ArrayList<MatrixBlock> inputs, List<String> inputsChars, String resultString,
			HashMap<Character, Integer> charToDimensionSizeInt, List<Character> summingChars, int outRows, int outCols){
		ArrayList<CNode> cnodeIn = new ArrayList<>();
		cnodeIn.add(new CNodeData(new LiteralOp(3), 0, 0, DataType.SCALAR));
		CNodeCell cnode = new CNodeCell(cnodeIn, null);

		StringBuilder sb = new StringBuilder();
		int indent = 2;
		indent(sb, indent);
		boolean needsSumming = summingChars.stream().anyMatch(x -> x != null);
		String itVar0 = cnode.createVarname(); // base name for accumulator and loop vars
		String outVar = itVar0;
		if (needsSumming) {
			sb.append("double ");
			sb.append(outVar);
			sb.append("=0;\n");
		}
		// open one for-loop per summing index
		Iterator<Character> hsIt = summingChars.iterator();
		while (hsIt.hasNext()) {
			indent(sb, indent);
			indent++;
			Character c = hsIt.next();
			String itVar = itVar0 + c;
			sb.append("for(int ");
			sb.append(itVar);
			sb.append("=0;");
			sb.append(itVar);
			sb.append("<");
			sb.append(charToDimensionSizeInt.get(c));
			sb.append(";");
			sb.append(itVar);
			sb.append("++){\n");
		}
		indent(sb, indent);
		if (needsSumming) {
			sb.append(outVar);
			sb.append("+=");
		}
		// emit the product of all operand accesses
		for (int i = 0; i < inputsChars.size(); i++) {
			// row index argument
			if (inputsChars.get(i).length() == 0){
				sb.append("getValue(b["); sb.append(i); sb.append("],b["); sb.append(i); sb.append("].clen, 0,");
			} else if (summingChars.contains(inputsChars.get(i).charAt(0))) {
				sb.append("getValue(b["); sb.append(i); sb.append("],b["); sb.append(i); sb.append("].clen,");
				sb.append(itVar0); sb.append(inputsChars.get(i).charAt(0)); sb.append(",");
			} else if (resultString.length() >= 1 && inputsChars.get(i).charAt(0) == resultString.charAt(0)) {
				sb.append("getValue(b["); sb.append(i); sb.append("],b["); sb.append(i); sb.append("].clen, rix,");
			} else if (resultString.length() == 2 && inputsChars.get(i).charAt(0) == resultString.charAt(1)) {
				sb.append("getValue(b["); sb.append(i); sb.append("],b["); sb.append(i); sb.append("].clen, cix,");
			} else {
				sb.append("getValue(b["); sb.append(i); sb.append("],b["); sb.append(i); sb.append("].clen, 0,");
			}
			// column index argument
			if (inputsChars.get(i).length() != 2){
				sb.append("0)");
			} else if (summingChars.contains(inputsChars.get(i).charAt(1))) {
				sb.append(itVar0); sb.append(inputsChars.get(i).charAt(1)); sb.append(")");
			} else if (resultString.length() >= 1 && inputsChars.get(i).charAt(1) == resultString.charAt(0)) {
				sb.append("rix)");
			} else if (resultString.length() == 2 && inputsChars.get(i).charAt(1) == resultString.charAt(1)) {
				sb.append("cix)");
			} else {
				sb.append("0)");
			}
			if (i < inputsChars.size() - 1) {
				sb.append(" * ");
			}
		}
		if (needsSumming) {
			sb.append(";\n");
		}
		// close the summing loops
		indent--;
		for (int si = 0; si < summingChars.size(); si++) {
			indent(sb, indent);
			indent--;
			sb.append("}\n");
		}

		String src = CNodeCell.JAVA_TEMPLATE;
		src = src.replace("%TYPE%", "NO_AGG");
		src = src.replace("%SPARSE_SAFE%", "false");
		src = src.replace("%SEQ%", "true");
		src = src.replace("%AGG_OP_NAME%", "null");
		if (needsSumming) {
			src = src.replace("%BODY_dense%", sb.toString());
			src = src.replace("%OUT%", outVar);
		}
		else {
			src = src.replace("%BODY_dense%", "");
			src = src.replace("%OUT%", sb.toString());
		}
		if( LOG.isTraceEnabled())
			LOG.trace(src);
		Class<?> cla = CodegenUtils.compileClass("codegen." + cnode.getClassname(), src);
		SpoofOperator op = CodegenUtils.createInstance(cla);
		MatrixBlock resBlock = new MatrixBlock();
		resBlock.reset(outRows, outCols);
		// the cell template expects the output block as first side input
		inputs.add(0, resBlock);
		MatrixBlock out = op.execute(inputs, new ArrayList<>(), new MatrixBlock(), _numThreads);
		return out;
	}

	public CPOperand[] getInputs() {
		return _in;
	}
}
googleapis/google-cloud-java
36,738
java-discoveryengine/proto-google-cloud-discoveryengine-v1/src/main/java/com/google/cloud/discoveryengine/v1/ListDocumentsRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/discoveryengine/v1/document_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.discoveryengine.v1; /** * * * <pre> * Request message for * [DocumentService.ListDocuments][google.cloud.discoveryengine.v1.DocumentService.ListDocuments] * method. * </pre> * * Protobuf type {@code google.cloud.discoveryengine.v1.ListDocumentsRequest} */ public final class ListDocumentsRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1.ListDocumentsRequest) ListDocumentsRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListDocumentsRequest.newBuilder() to construct. 
private ListDocumentsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListDocumentsRequest() { parent_ = ""; pageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListDocumentsRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.discoveryengine.v1.DocumentServiceProto .internal_static_google_cloud_discoveryengine_v1_ListDocumentsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.discoveryengine.v1.DocumentServiceProto .internal_static_google_cloud_discoveryengine_v1_ListDocumentsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.discoveryengine.v1.ListDocumentsRequest.class, com.google.cloud.discoveryengine.v1.ListDocumentsRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The parent branch resource name, such as * `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}`. * Use `default_branch` as the branch ID, to list documents under the default * branch. * * If the caller does not have permission to list * [Document][google.cloud.discoveryengine.v1.Document]s under this branch, * regardless of whether or not this branch exists, a `PERMISSION_DENIED` * error is returned. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. 
*/ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The parent branch resource name, such as * `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}`. * Use `default_branch` as the branch ID, to list documents under the default * branch. * * If the caller does not have permission to list * [Document][google.cloud.discoveryengine.v1.Document]s under this branch, * regardless of whether or not this branch exists, a `PERMISSION_DENIED` * error is returned. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 2; private int pageSize_ = 0; /** * * * <pre> * Maximum number of [Document][google.cloud.discoveryengine.v1.Document]s to * return. If unspecified, defaults to 100. The maximum allowed value is 1000. * Values above 1000 are set to 1000. * * If this field is negative, an `INVALID_ARGUMENT` error is returned. * </pre> * * <code>int32 page_size = 2;</code> * * @return The pageSize. 
*/ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * A page token * [ListDocumentsResponse.next_page_token][google.cloud.discoveryengine.v1.ListDocumentsResponse.next_page_token], * received from a previous * [DocumentService.ListDocuments][google.cloud.discoveryengine.v1.DocumentService.ListDocuments] * call. Provide this to retrieve the subsequent page. * * When paginating, all other parameters provided to * [DocumentService.ListDocuments][google.cloud.discoveryengine.v1.DocumentService.ListDocuments] * must match the call that provided the page token. Otherwise, an * `INVALID_ARGUMENT` error is returned. * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * A page token * [ListDocumentsResponse.next_page_token][google.cloud.discoveryengine.v1.ListDocumentsResponse.next_page_token], * received from a previous * [DocumentService.ListDocuments][google.cloud.discoveryengine.v1.DocumentService.ListDocuments] * call. Provide this to retrieve the subsequent page. * * When paginating, all other parameters provided to * [DocumentService.ListDocuments][google.cloud.discoveryengine.v1.DocumentService.ListDocuments] * must match the call that provided the page token. Otherwise, an * `INVALID_ARGUMENT` error is returned. * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (pageSize_ != 0) { output.writeInt32(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.discoveryengine.v1.ListDocumentsRequest)) { return super.equals(obj); } com.google.cloud.discoveryengine.v1.ListDocumentsRequest other = 
(com.google.cloud.discoveryengine.v1.ListDocumentsRequest) obj; if (!getParent().equals(other.getParent())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.discoveryengine.v1.ListDocumentsRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1.ListDocumentsRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1.ListDocumentsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1.ListDocumentsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1.ListDocumentsRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1.ListDocumentsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1.ListDocumentsRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1.ListDocumentsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.discoveryengine.v1.ListDocumentsRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1.ListDocumentsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.discoveryengine.v1.ListDocumentsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1.ListDocumentsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() 
{ return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.discoveryengine.v1.ListDocumentsRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for * [DocumentService.ListDocuments][google.cloud.discoveryengine.v1.DocumentService.ListDocuments] * method. * </pre> * * Protobuf type {@code google.cloud.discoveryengine.v1.ListDocumentsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1.ListDocumentsRequest) com.google.cloud.discoveryengine.v1.ListDocumentsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.discoveryengine.v1.DocumentServiceProto .internal_static_google_cloud_discoveryengine_v1_ListDocumentsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.discoveryengine.v1.DocumentServiceProto .internal_static_google_cloud_discoveryengine_v1_ListDocumentsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.discoveryengine.v1.ListDocumentsRequest.class, com.google.cloud.discoveryengine.v1.ListDocumentsRequest.Builder.class); } // Construct using com.google.cloud.discoveryengine.v1.ListDocumentsRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public 
Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; pageSize_ = 0; pageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.discoveryengine.v1.DocumentServiceProto .internal_static_google_cloud_discoveryengine_v1_ListDocumentsRequest_descriptor; } @java.lang.Override public com.google.cloud.discoveryengine.v1.ListDocumentsRequest getDefaultInstanceForType() { return com.google.cloud.discoveryengine.v1.ListDocumentsRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.discoveryengine.v1.ListDocumentsRequest build() { com.google.cloud.discoveryengine.v1.ListDocumentsRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.discoveryengine.v1.ListDocumentsRequest buildPartial() { com.google.cloud.discoveryengine.v1.ListDocumentsRequest result = new com.google.cloud.discoveryengine.v1.ListDocumentsRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.discoveryengine.v1.ListDocumentsRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.pageSize_ = pageSize_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageToken_ = pageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return 
super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.discoveryengine.v1.ListDocumentsRequest) { return mergeFrom((com.google.cloud.discoveryengine.v1.ListDocumentsRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.discoveryengine.v1.ListDocumentsRequest other) { if (other == com.google.cloud.discoveryengine.v1.ListDocumentsRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { pageSize_ = input.readInt32(); bitField0_ |= 0x00000002; break; } // case 16 case 26: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 
default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The parent branch resource name, such as * `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}`. * Use `default_branch` as the branch ID, to list documents under the default * branch. * * If the caller does not have permission to list * [Document][google.cloud.discoveryengine.v1.Document]s under this branch, * regardless of whether or not this branch exists, a `PERMISSION_DENIED` * error is returned. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The parent branch resource name, such as * `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}`. * Use `default_branch` as the branch ID, to list documents under the default * branch. * * If the caller does not have permission to list * [Document][google.cloud.discoveryengine.v1.Document]s under this branch, * regardless of whether or not this branch exists, a `PERMISSION_DENIED` * error is returned. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. 
*/ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The parent branch resource name, such as * `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}`. * Use `default_branch` as the branch ID, to list documents under the default * branch. * * If the caller does not have permission to list * [Document][google.cloud.discoveryengine.v1.Document]s under this branch, * regardless of whether or not this branch exists, a `PERMISSION_DENIED` * error is returned. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The parent branch resource name, such as * `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}`. * Use `default_branch` as the branch ID, to list documents under the default * branch. * * If the caller does not have permission to list * [Document][google.cloud.discoveryengine.v1.Document]s under this branch, * regardless of whether or not this branch exists, a `PERMISSION_DENIED` * error is returned. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. 
*/ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The parent branch resource name, such as * `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}`. * Use `default_branch` as the branch ID, to list documents under the default * branch. * * If the caller does not have permission to list * [Document][google.cloud.discoveryengine.v1.Document]s under this branch, * regardless of whether or not this branch exists, a `PERMISSION_DENIED` * error is returned. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private int pageSize_; /** * * * <pre> * Maximum number of [Document][google.cloud.discoveryengine.v1.Document]s to * return. If unspecified, defaults to 100. The maximum allowed value is 1000. * Values above 1000 are set to 1000. * * If this field is negative, an `INVALID_ARGUMENT` error is returned. * </pre> * * <code>int32 page_size = 2;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * Maximum number of [Document][google.cloud.discoveryengine.v1.Document]s to * return. If unspecified, defaults to 100. The maximum allowed value is 1000. * Values above 1000 are set to 1000. * * If this field is negative, an `INVALID_ARGUMENT` error is returned. * </pre> * * <code>int32 page_size = 2;</code> * * @param value The pageSize to set. * @return This builder for chaining. 
*/ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Maximum number of [Document][google.cloud.discoveryengine.v1.Document]s to * return. If unspecified, defaults to 100. The maximum allowed value is 1000. * Values above 1000 are set to 1000. * * If this field is negative, an `INVALID_ARGUMENT` error is returned. * </pre> * * <code>int32 page_size = 2;</code> * * @return This builder for chaining. */ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000002); pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * A page token * [ListDocumentsResponse.next_page_token][google.cloud.discoveryengine.v1.ListDocumentsResponse.next_page_token], * received from a previous * [DocumentService.ListDocuments][google.cloud.discoveryengine.v1.DocumentService.ListDocuments] * call. Provide this to retrieve the subsequent page. * * When paginating, all other parameters provided to * [DocumentService.ListDocuments][google.cloud.discoveryengine.v1.DocumentService.ListDocuments] * must match the call that provided the page token. Otherwise, an * `INVALID_ARGUMENT` error is returned. * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A page token * [ListDocumentsResponse.next_page_token][google.cloud.discoveryengine.v1.ListDocumentsResponse.next_page_token], * received from a previous * [DocumentService.ListDocuments][google.cloud.discoveryengine.v1.DocumentService.ListDocuments] * call. Provide this to retrieve the subsequent page. 
* * When paginating, all other parameters provided to * [DocumentService.ListDocuments][google.cloud.discoveryengine.v1.DocumentService.ListDocuments] * must match the call that provided the page token. Otherwise, an * `INVALID_ARGUMENT` error is returned. * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A page token * [ListDocumentsResponse.next_page_token][google.cloud.discoveryengine.v1.ListDocumentsResponse.next_page_token], * received from a previous * [DocumentService.ListDocuments][google.cloud.discoveryengine.v1.DocumentService.ListDocuments] * call. Provide this to retrieve the subsequent page. * * When paginating, all other parameters provided to * [DocumentService.ListDocuments][google.cloud.discoveryengine.v1.DocumentService.ListDocuments] * must match the call that provided the page token. Otherwise, an * `INVALID_ARGUMENT` error is returned. * </pre> * * <code>string page_token = 3;</code> * * @param value The pageToken to set. * @return This builder for chaining. */ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * A page token * [ListDocumentsResponse.next_page_token][google.cloud.discoveryengine.v1.ListDocumentsResponse.next_page_token], * received from a previous * [DocumentService.ListDocuments][google.cloud.discoveryengine.v1.DocumentService.ListDocuments] * call. Provide this to retrieve the subsequent page. 
* * When paginating, all other parameters provided to * [DocumentService.ListDocuments][google.cloud.discoveryengine.v1.DocumentService.ListDocuments] * must match the call that provided the page token. Otherwise, an * `INVALID_ARGUMENT` error is returned. * </pre> * * <code>string page_token = 3;</code> * * @return This builder for chaining. */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * A page token * [ListDocumentsResponse.next_page_token][google.cloud.discoveryengine.v1.ListDocumentsResponse.next_page_token], * received from a previous * [DocumentService.ListDocuments][google.cloud.discoveryengine.v1.DocumentService.ListDocuments] * call. Provide this to retrieve the subsequent page. * * When paginating, all other parameters provided to * [DocumentService.ListDocuments][google.cloud.discoveryengine.v1.DocumentService.ListDocuments] * must match the call that provided the page token. Otherwise, an * `INVALID_ARGUMENT` error is returned. * </pre> * * <code>string page_token = 3;</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. 
*/ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1.ListDocumentsRequest) } // @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1.ListDocumentsRequest) private static final com.google.cloud.discoveryengine.v1.ListDocumentsRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1.ListDocumentsRequest(); } public static com.google.cloud.discoveryengine.v1.ListDocumentsRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListDocumentsRequest> PARSER = new com.google.protobuf.AbstractParser<ListDocumentsRequest>() { @java.lang.Override public ListDocumentsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public 
static com.google.protobuf.Parser<ListDocumentsRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListDocumentsRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.discoveryengine.v1.ListDocumentsRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,696
java-dlp/proto-google-cloud-dlp-v2/src/main/java/com/google/privacy/dlp/v2/ListJobTriggersResponse.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/privacy/dlp/v2/dlp.proto

// Protobuf Java Version: 3.25.8
package com.google.privacy.dlp.v2;

/**
 * Response message for ListJobTriggers.
 *
 * <p>Immutable message; all mutation goes through {@link Builder} obtained via
 * {@link #newBuilder()} / {@link #toBuilder()}.
 *
 * <p>Protobuf type {@code google.privacy.dlp.v2.ListJobTriggersResponse}
 */
public final class ListJobTriggersResponse extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.privacy.dlp.v2.ListJobTriggersResponse)
    ListJobTriggersResponseOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ListJobTriggersResponse.newBuilder() to construct.
  private ListJobTriggersResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor used only for DEFAULT_INSTANCE / newInstance(); fields get
  // their proto3 defaults (empty list, empty string).
  private ListJobTriggersResponse() {
    jobTriggers_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListJobTriggersResponse();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.privacy.dlp.v2.DlpProto
        .internal_static_google_privacy_dlp_v2_ListJobTriggersResponse_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.privacy.dlp.v2.DlpProto
        .internal_static_google_privacy_dlp_v2_ListJobTriggersResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.privacy.dlp.v2.ListJobTriggersResponse.class,
            com.google.privacy.dlp.v2.ListJobTriggersResponse.Builder.class);
  }

  public static final int JOB_TRIGGERS_FIELD_NUMBER = 1;

  // Immutable once built: buildPartial() wraps the Builder's list in
  // Collections.unmodifiableList before assigning it here.
  @SuppressWarnings("serial")
  private java.util.List<com.google.privacy.dlp.v2.JobTrigger> jobTriggers_;

  /**
   * List of triggeredJobs, up to page_size in ListJobTriggersRequest.
   *
   * <p>{@code repeated .google.privacy.dlp.v2.JobTrigger job_triggers = 1;}
   */
  @java.lang.Override
  public java.util.List<com.google.privacy.dlp.v2.JobTrigger> getJobTriggersList() {
    return jobTriggers_;
  }

  /**
   * List of triggeredJobs, up to page_size in ListJobTriggersRequest.
   *
   * <p>{@code repeated .google.privacy.dlp.v2.JobTrigger job_triggers = 1;}
   */
  @java.lang.Override
  public java.util.List<? extends com.google.privacy.dlp.v2.JobTriggerOrBuilder>
      getJobTriggersOrBuilderList() {
    return jobTriggers_;
  }

  /**
   * List of triggeredJobs, up to page_size in ListJobTriggersRequest.
   *
   * <p>{@code repeated .google.privacy.dlp.v2.JobTrigger job_triggers = 1;}
   */
  @java.lang.Override
  public int getJobTriggersCount() {
    return jobTriggers_.size();
  }

  /**
   * List of triggeredJobs, up to page_size in ListJobTriggersRequest.
   *
   * <p>{@code repeated .google.privacy.dlp.v2.JobTrigger job_triggers = 1;}
   */
  @java.lang.Override
  public com.google.privacy.dlp.v2.JobTrigger getJobTriggers(int index) {
    return jobTriggers_.get(index);
  }

  /**
   * List of triggeredJobs, up to page_size in ListJobTriggersRequest.
   *
   * <p>{@code repeated .google.privacy.dlp.v2.JobTrigger job_triggers = 1;}
   */
  @java.lang.Override
  public com.google.privacy.dlp.v2.JobTriggerOrBuilder getJobTriggersOrBuilder(int index) {
    return jobTriggers_.get(index);
  }

  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

  // Holds either a java.lang.String or a ByteString: parsed bytes are decoded to a
  // String lazily on first access and the decoded form is cached back (hence volatile).
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";

  /**
   * If the next page is available then this value is the next page token to be
   * used in the following ListJobTriggers request.
   *
   * <p>{@code string next_page_token = 2;}
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s; // cache the decoded String for subsequent calls
      return s;
    }
  }

  /**
   * If the next page is available then this value is the next page token to be
   * used in the following ListJobTriggers request.
   *
   * <p>{@code string next_page_token = 2;}
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b; // cache the encoded ByteString
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // Memoized isInitialized() result: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < jobTriggers_.size(); i++) {
      output.writeMessage(1, jobTriggers_.get(i));
    }
    // proto3: default (empty) scalar values are not serialized.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize (inherited) caches the result; -1 means not yet computed.
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    for (int i = 0; i < jobTriggers_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, jobTriggers_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.privacy.dlp.v2.ListJobTriggersResponse)) {
      return super.equals(obj);
    }
    com.google.privacy.dlp.v2.ListJobTriggersResponse other =
        (com.google.privacy.dlp.v2.ListJobTriggersResponse) obj;

    if (!getJobTriggersList().equals(other.getJobTriggersList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getJobTriggersCount() > 0) {
      hash = (37 * hash) + JOB_TRIGGERS_FIELD_NUMBER;
      hash = (53 * hash) + getJobTriggersList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.privacy.dlp.v2.ListJobTriggersResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.privacy.dlp.v2.ListJobTriggersResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.privacy.dlp.v2.ListJobTriggersResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.privacy.dlp.v2.ListJobTriggersResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.privacy.dlp.v2.ListJobTriggersResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.privacy.dlp.v2.ListJobTriggersResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.privacy.dlp.v2.ListJobTriggersResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.privacy.dlp.v2.ListJobTriggersResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.privacy.dlp.v2.ListJobTriggersResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.privacy.dlp.v2.ListJobTriggersResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.privacy.dlp.v2.ListJobTriggersResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.privacy.dlp.v2.ListJobTriggersResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.privacy.dlp.v2.ListJobTriggersResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Avoid a needless mergeFrom when this is already the default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * Response message for ListJobTriggers.
   *
   * <p>Protobuf type {@code google.privacy.dlp.v2.ListJobTriggersResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.privacy.dlp.v2.ListJobTriggersResponse)
      com.google.privacy.dlp.v2.ListJobTriggersResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.privacy.dlp.v2.DlpProto
          .internal_static_google_privacy_dlp_v2_ListJobTriggersResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.privacy.dlp.v2.DlpProto
          .internal_static_google_privacy_dlp_v2_ListJobTriggersResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.privacy.dlp.v2.ListJobTriggersResponse.class,
              com.google.privacy.dlp.v2.ListJobTriggersResponse.Builder.class);
    }

    // Construct using com.google.privacy.dlp.v2.ListJobTriggersResponse.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (jobTriggersBuilder_ == null) {
        jobTriggers_ = java.util.Collections.emptyList();
      } else {
        jobTriggers_ = null;
        jobTriggersBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.privacy.dlp.v2.DlpProto
          .internal_static_google_privacy_dlp_v2_ListJobTriggersResponse_descriptor;
    }

    @java.lang.Override
    public com.google.privacy.dlp.v2.ListJobTriggersResponse getDefaultInstanceForType() {
      return com.google.privacy.dlp.v2.ListJobTriggersResponse.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.privacy.dlp.v2.ListJobTriggersResponse build() {
      com.google.privacy.dlp.v2.ListJobTriggersResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.privacy.dlp.v2.ListJobTriggersResponse buildPartial() {
      com.google.privacy.dlp.v2.ListJobTriggersResponse result =
          new com.google.privacy.dlp.v2.ListJobTriggersResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Transfers the repeated field: freezes the Builder-owned list (and clears its
    // "mutable" bit) so the built message can share it without copying.
    private void buildPartialRepeatedFields(
        com.google.privacy.dlp.v2.ListJobTriggersResponse result) {
      if (jobTriggersBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          jobTriggers_ = java.util.Collections.unmodifiableList(jobTriggers_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.jobTriggers_ = jobTriggers_;
      } else {
        result.jobTriggers_ = jobTriggersBuilder_.build();
      }
    }

    private void buildPartial0(com.google.privacy.dlp.v2.ListJobTriggersResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.privacy.dlp.v2.ListJobTriggersResponse) {
        return mergeFrom((com.google.privacy.dlp.v2.ListJobTriggersResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.privacy.dlp.v2.ListJobTriggersResponse other) {
      if (other == com.google.privacy.dlp.v2.ListJobTriggersResponse.getDefaultInstance())
        return this;
      if (jobTriggersBuilder_ == null) {
        if (!other.jobTriggers_.isEmpty()) {
          if (jobTriggers_.isEmpty()) {
            // Share other's immutable list directly; mark ours as not-mutable.
            jobTriggers_ = other.jobTriggers_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureJobTriggersIsMutable();
            jobTriggers_.addAll(other.jobTriggers_);
          }
          onChanged();
        }
      } else {
        if (!other.jobTriggers_.isEmpty()) {
          if (jobTriggersBuilder_.isEmpty()) {
            // Builder is empty: drop it and adopt other's list (rebuilding the
            // field builder lazily if the runtime always uses field builders).
            jobTriggersBuilder_.dispose();
            jobTriggersBuilder_ = null;
            jobTriggers_ = other.jobTriggers_;
            bitField0_ = (bitField0_ & ~0x00000001);
            jobTriggersBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getJobTriggersFieldBuilder()
                    : null;
          } else {
            jobTriggersBuilder_.addAllMessages(other.jobTriggers_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.privacy.dlp.v2.JobTrigger m =
                    input.readMessage(
                        com.google.privacy.dlp.v2.JobTrigger.parser(), extensionRegistry);
                if (jobTriggersBuilder_ == null) {
                  ensureJobTriggersIsMutable();
                  jobTriggers_.add(m);
                } else {
                  jobTriggersBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Bit 0x00000001: jobTriggers_ is a Builder-owned mutable list (vs. a shared
    // immutable one). Bit 0x00000002: nextPageToken_ has been explicitly set.
    private int bitField0_;

    private java.util.List<com.google.privacy.dlp.v2.JobTrigger> jobTriggers_ =
        java.util.Collections.emptyList();

    private void ensureJobTriggersIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        jobTriggers_ = new java.util.ArrayList<com.google.privacy.dlp.v2.JobTrigger>(jobTriggers_);
        bitField0_ |= 0x00000001;
      }
    }

    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.privacy.dlp.v2.JobTrigger,
            com.google.privacy.dlp.v2.JobTrigger.Builder,
            com.google.privacy.dlp.v2.JobTriggerOrBuilder>
        jobTriggersBuilder_;

    /**
     * List of triggeredJobs, up to page_size in ListJobTriggersRequest.
     *
     * <p>{@code repeated .google.privacy.dlp.v2.JobTrigger job_triggers = 1;}
     */
    public java.util.List<com.google.privacy.dlp.v2.JobTrigger> getJobTriggersList() {
      if (jobTriggersBuilder_ == null) {
        return java.util.Collections.unmodifiableList(jobTriggers_);
      } else {
        return jobTriggersBuilder_.getMessageList();
      }
    }

    /**
     * List of triggeredJobs, up to page_size in ListJobTriggersRequest.
     *
     * <p>{@code repeated .google.privacy.dlp.v2.JobTrigger job_triggers = 1;}
     */
    public int getJobTriggersCount() {
      if (jobTriggersBuilder_ == null) {
        return jobTriggers_.size();
      } else {
        return jobTriggersBuilder_.getCount();
      }
    }

    /**
     * List of triggeredJobs, up to page_size in ListJobTriggersRequest.
     *
     * <p>{@code repeated .google.privacy.dlp.v2.JobTrigger job_triggers = 1;}
     */
    public com.google.privacy.dlp.v2.JobTrigger getJobTriggers(int index) {
      if (jobTriggersBuilder_ == null) {
        return jobTriggers_.get(index);
      } else {
        return jobTriggersBuilder_.getMessage(index);
      }
    }

    /**
     * List of triggeredJobs, up to page_size in ListJobTriggersRequest.
     *
     * <p>{@code repeated .google.privacy.dlp.v2.JobTrigger job_triggers = 1;}
     */
    public Builder setJobTriggers(int index, com.google.privacy.dlp.v2.JobTrigger value) {
      if (jobTriggersBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureJobTriggersIsMutable();
        jobTriggers_.set(index, value);
        onChanged();
      } else {
        jobTriggersBuilder_.setMessage(index, value);
      }
      return this;
    }

    /**
     * List of triggeredJobs, up to page_size in ListJobTriggersRequest.
     *
     * <p>{@code repeated .google.privacy.dlp.v2.JobTrigger job_triggers = 1;}
     */
    public Builder setJobTriggers(
        int index, com.google.privacy.dlp.v2.JobTrigger.Builder builderForValue) {
      if (jobTriggersBuilder_ == null) {
        ensureJobTriggersIsMutable();
        jobTriggers_.set(index, builderForValue.build());
        onChanged();
      } else {
        jobTriggersBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     * List of triggeredJobs, up to page_size in ListJobTriggersRequest.
     *
     * <p>{@code repeated .google.privacy.dlp.v2.JobTrigger job_triggers = 1;}
     */
    public Builder addJobTriggers(com.google.privacy.dlp.v2.JobTrigger value) {
      if (jobTriggersBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureJobTriggersIsMutable();
        jobTriggers_.add(value);
        onChanged();
      } else {
        jobTriggersBuilder_.addMessage(value);
      }
      return this;
    }

    /**
     * List of triggeredJobs, up to page_size in ListJobTriggersRequest.
     *
     * <p>{@code repeated .google.privacy.dlp.v2.JobTrigger job_triggers = 1;}
     */
    public Builder addJobTriggers(int index, com.google.privacy.dlp.v2.JobTrigger value) {
      if (jobTriggersBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureJobTriggersIsMutable();
        jobTriggers_.add(index, value);
        onChanged();
      } else {
        jobTriggersBuilder_.addMessage(index, value);
      }
      return this;
    }

    /**
     * List of triggeredJobs, up to page_size in ListJobTriggersRequest.
     *
     * <p>{@code repeated .google.privacy.dlp.v2.JobTrigger job_triggers = 1;}
     */
    public Builder addJobTriggers(com.google.privacy.dlp.v2.JobTrigger.Builder builderForValue) {
      if (jobTriggersBuilder_ == null) {
        ensureJobTriggersIsMutable();
        jobTriggers_.add(builderForValue.build());
        onChanged();
      } else {
        jobTriggersBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    /**
     * List of triggeredJobs, up to page_size in ListJobTriggersRequest.
     *
     * <p>{@code repeated .google.privacy.dlp.v2.JobTrigger job_triggers = 1;}
     */
    public Builder addJobTriggers(
        int index, com.google.privacy.dlp.v2.JobTrigger.Builder builderForValue) {
      if (jobTriggersBuilder_ == null) {
        ensureJobTriggersIsMutable();
        jobTriggers_.add(index, builderForValue.build());
        onChanged();
      } else {
        jobTriggersBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     * List of triggeredJobs, up to page_size in ListJobTriggersRequest.
     *
     * <p>{@code repeated .google.privacy.dlp.v2.JobTrigger job_triggers = 1;}
     */
    public Builder addAllJobTriggers(
        java.lang.Iterable<? extends com.google.privacy.dlp.v2.JobTrigger> values) {
      if (jobTriggersBuilder_ == null) {
        ensureJobTriggersIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, jobTriggers_);
        onChanged();
      } else {
        jobTriggersBuilder_.addAllMessages(values);
      }
      return this;
    }

    /**
     * List of triggeredJobs, up to page_size in ListJobTriggersRequest.
     *
     * <p>{@code repeated .google.privacy.dlp.v2.JobTrigger job_triggers = 1;}
     */
    public Builder clearJobTriggers() {
      if (jobTriggersBuilder_ == null) {
        jobTriggers_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        jobTriggersBuilder_.clear();
      }
      return this;
    }

    /**
     * List of triggeredJobs, up to page_size in ListJobTriggersRequest.
     *
     * <p>{@code repeated .google.privacy.dlp.v2.JobTrigger job_triggers = 1;}
     */
    public Builder removeJobTriggers(int index) {
      if (jobTriggersBuilder_ == null) {
        ensureJobTriggersIsMutable();
        jobTriggers_.remove(index);
        onChanged();
      } else {
        jobTriggersBuilder_.remove(index);
      }
      return this;
    }

    /**
     * List of triggeredJobs, up to page_size in ListJobTriggersRequest.
     *
     * <p>{@code repeated .google.privacy.dlp.v2.JobTrigger job_triggers = 1;}
     */
    public com.google.privacy.dlp.v2.JobTrigger.Builder getJobTriggersBuilder(int index) {
      return getJobTriggersFieldBuilder().getBuilder(index);
    }

    /**
     * List of triggeredJobs, up to page_size in ListJobTriggersRequest.
     *
     * <p>{@code repeated .google.privacy.dlp.v2.JobTrigger job_triggers = 1;}
     */
    public com.google.privacy.dlp.v2.JobTriggerOrBuilder getJobTriggersOrBuilder(int index) {
      if (jobTriggersBuilder_ == null) {
        return jobTriggers_.get(index);
      } else {
        return jobTriggersBuilder_.getMessageOrBuilder(index);
      }
    }

    /**
     * List of triggeredJobs, up to page_size in ListJobTriggersRequest.
     *
     * <p>{@code repeated .google.privacy.dlp.v2.JobTrigger job_triggers = 1;}
     */
    public java.util.List<? extends com.google.privacy.dlp.v2.JobTriggerOrBuilder>
        getJobTriggersOrBuilderList() {
      if (jobTriggersBuilder_ != null) {
        return jobTriggersBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(jobTriggers_);
      }
    }

    /**
     * List of triggeredJobs, up to page_size in ListJobTriggersRequest.
     *
     * <p>{@code repeated .google.privacy.dlp.v2.JobTrigger job_triggers = 1;}
     */
    public com.google.privacy.dlp.v2.JobTrigger.Builder addJobTriggersBuilder() {
      return getJobTriggersFieldBuilder()
          .addBuilder(com.google.privacy.dlp.v2.JobTrigger.getDefaultInstance());
    }

    /**
     * List of triggeredJobs, up to page_size in ListJobTriggersRequest.
     *
     * <p>{@code repeated .google.privacy.dlp.v2.JobTrigger job_triggers = 1;}
     */
    public com.google.privacy.dlp.v2.JobTrigger.Builder addJobTriggersBuilder(int index) {
      return getJobTriggersFieldBuilder()
          .addBuilder(index, com.google.privacy.dlp.v2.JobTrigger.getDefaultInstance());
    }

    /**
     * List of triggeredJobs, up to page_size in ListJobTriggersRequest.
     *
     * <p>{@code repeated .google.privacy.dlp.v2.JobTrigger job_triggers = 1;}
     */
    public java.util.List<com.google.privacy.dlp.v2.JobTrigger.Builder>
        getJobTriggersBuilderList() {
      return getJobTriggersFieldBuilder().getBuilderList();
    }

    // Lazily creates the RepeatedFieldBuilderV3; once created, the plain list
    // jobTriggers_ is handed to the builder and nulled out here.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.privacy.dlp.v2.JobTrigger,
            com.google.privacy.dlp.v2.JobTrigger.Builder,
            com.google.privacy.dlp.v2.JobTriggerOrBuilder>
        getJobTriggersFieldBuilder() {
      if (jobTriggersBuilder_ == null) {
        jobTriggersBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.privacy.dlp.v2.JobTrigger,
                com.google.privacy.dlp.v2.JobTrigger.Builder,
                com.google.privacy.dlp.v2.JobTriggerOrBuilder>(
                jobTriggers_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        jobTriggers_ = null;
      }
      return jobTriggersBuilder_;
    }

    // Same String/ByteString lazy-decode scheme as the message field.
    private java.lang.Object nextPageToken_ = "";

    /**
     * If the next page is available then this value is the next page token to be
     * used in the following ListJobTriggers request.
     *
     * <p>{@code string next_page_token = 2;}
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * If the next page is available then this value is the next page token to be
     * used in the following ListJobTriggers request.
     *
     * <p>{@code string next_page_token = 2;}
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * If the next page is available then this value is the next page token to be
     * used in the following ListJobTriggers request.
     *
     * <p>{@code string next_page_token = 2;}
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     * If the next page is available then this value is the next page token to be
     * used in the following ListJobTriggers request.
     *
     * <p>{@code string next_page_token = 2;}
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     * If the next page is available then this value is the next page token to be
     * used in the following ListJobTriggers request.
     *
     * <p>{@code string next_page_token = 2;}
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.privacy.dlp.v2.ListJobTriggersResponse)
  }

  // @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.ListJobTriggersResponse)
  private static final com.google.privacy.dlp.v2.ListJobTriggersResponse DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.privacy.dlp.v2.ListJobTriggersResponse();
  }

  public static com.google.privacy.dlp.v2.ListJobTriggersResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<ListJobTriggersResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListJobTriggersResponse>() {
        @java.lang.Override
        public ListJobTriggersResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach the partially-parsed message so callers can inspect it.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListJobTriggersResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListJobTriggersResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.privacy.dlp.v2.ListJobTriggersResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
apache/derby
36,363
java/stubs/felix/org/osgi/framework/BundleContext.java
/*
 * $Header: /cvshome/build/org.osgi.framework/src/org/osgi/framework/BundleContext.java,v 1.22 2007/02/21 16:49:05 hargrave Exp $
 *
 * Copyright (c) OSGi Alliance (2000, 2007). All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.osgi.framework;

import java.io.File;
import java.io.InputStream;
import java.util.Dictionary;

/**
 * A bundle's execution context within the Framework. The context is used to
 * grant access to other methods so that this bundle can interact with the
 * Framework.
 *
 * <p>
 * <code>BundleContext</code> methods allow a bundle to:
 * <ul>
 * <li>Subscribe to events published by the Framework.
 * <li>Register service objects with the Framework service registry.
 * <li>Retrieve <code>ServiceReferences</code> from the Framework service
 * registry.
 * <li>Get and release service objects for a referenced service.
 * <li>Install new bundles in the Framework.
 * <li>Get the list of bundles installed in the Framework.
 * <li>Get the {@link Bundle} object for a bundle.
 * <li>Create <code>File</code> objects for files in a persistent storage
 * area provided for the bundle by the Framework.
 * </ul>
 *
 * <p>
 * A <code>BundleContext</code> object is created by the Framework and passed
 * to the bundle's {@link BundleActivator#start} and
 * {@link BundleActivator#stop} methods. It is intended for the private use of
 * its associated bundle (the <em>context bundle</em>) and is not meant to be
 * shared with other bundles in the OSGi environment.
 *
 * <p>
 * The <code>BundleContext</code> object is only valid while its context
 * bundle is in the <code>STARTING</code>, <code>STOPPING</code>, or
 * <code>ACTIVE</code> state. If it is used after the context bundle has
 * stopped, an <code>IllegalStateException</code> must be thrown; the object
 * must never be reused after its context bundle is stopped.
 *
 * <p>
 * The Framework is the only entity that can create <code>BundleContext</code>
 * objects, and they are only valid within the Framework that created them.
 *
 * @ThreadSafe
 * @version $Revision: 1.22 $
 */
public interface BundleContext {
	/**
	 * Returns the value of the specified property. If the key is not found in
	 * the Framework properties, the system properties are then searched.
	 *
	 * <p>
	 * The Framework defines the following standard property keys, all of
	 * which every bundle must be permitted to read:
	 * <ul>
	 * <li>{@link Constants#FRAMEWORK_VERSION} - The OSGi Framework version.
	 * <li>{@link Constants#FRAMEWORK_VENDOR} - The Framework implementation
	 * vendor.
	 * <li>{@link Constants#FRAMEWORK_LANGUAGE} - The language being used (see
	 * ISO 639 for possible values).
	 * <li>{@link Constants#FRAMEWORK_OS_NAME} - The host computer operating
	 * system.
	 * <li>{@link Constants#FRAMEWORK_OS_VERSION} - The host computer
	 * operating system version number.
	 * <li>{@link Constants#FRAMEWORK_PROCESSOR} - The host computer processor
	 * name.
	 * </ul>
	 * The last four are used by the {@link Constants#BUNDLE_NATIVECODE}
	 * manifest header's matching algorithm for selecting native code.
	 *
	 * @param key The name of the requested property.
	 * @return The value of the requested property, or <code>null</code> if
	 *         the property is undefined.
	 * @throws java.lang.SecurityException If the caller does not have the
	 *         appropriate <code>PropertyPermission</code> to read the
	 *         property, and the Java Runtime Environment supports permissions.
	 */
	public String getProperty(String key);

	/**
	 * Returns the <code>Bundle</code> object associated with this
	 * <code>BundleContext</code>; this bundle is called the context bundle.
	 *
	 * @return The <code>Bundle</code> object associated with this
	 *         <code>BundleContext</code>.
	 * @throws java.lang.IllegalStateException If this BundleContext is no
	 *         longer valid.
	 */
	public Bundle getBundle();

	/**
	 * Installs a bundle from the specified location string. The bundle content
	 * is obtained from <code>location</code> (typically a URL) in an
	 * implementation-dependent manner; every installed bundle is uniquely
	 * identified by its location string.
	 *
	 * <p>
	 * If a bundle with the same location string is already installed, its
	 * existing <code>Bundle</code> object is returned. Otherwise the content
	 * is read, native-code dependencies are resolved, resources (unique ID and
	 * persistent storage, if supported) are allocated, any declared
	 * Bundle-RequiredExecutionEnvironment is verified, the bundle's state is
	 * set to <code>INSTALLED</code>, and a {@link BundleEvent#INSTALLED}
	 * event is fired. On failure a <code>BundleException</code> is thrown and
	 * no trace of the bundle remains.
	 *
	 * @param location The location identifier of the bundle to install.
	 * @return The <code>Bundle</code> object of the installed bundle.
	 * @throws BundleException If the installation failed.
	 * @throws java.lang.SecurityException If the caller does not have the
	 *         appropriate <code>AdminPermission[installed bundle,LIFECYCLE]</code>,
	 *         and the Java Runtime Environment supports permissions.
	 * @throws java.lang.IllegalStateException If this BundleContext is no
	 *         longer valid.
	 */
	public Bundle installBundle(String location) throws BundleException;

	/**
	 * Installs a bundle from the specified <code>InputStream</code> object.
	 *
	 * <p>
	 * Performs all of the steps of
	 * <code>installBundle(String location)</code>, except that the bundle
	 * content is read from <code>input</code>; the location string is used
	 * only as the bundle's identity. This method must always close
	 * <code>input</code>, even if an exception is thrown.
	 *
	 * @param location The location identifier of the bundle to install.
	 * @param input The <code>InputStream</code> object from which this bundle
	 *        will be read.
	 * @return The <code>Bundle</code> object of the installed bundle.
	 * @throws BundleException If the provided stream cannot be read or the
	 *         installation failed.
	 * @throws java.lang.SecurityException If the caller does not have the
	 *         appropriate <code>AdminPermission[installed bundle,LIFECYCLE]</code>,
	 *         and the Java Runtime Environment supports permissions.
	 * @throws java.lang.IllegalStateException If this BundleContext is no
	 *         longer valid.
	 * @see #installBundle(java.lang.String)
	 */
	public Bundle installBundle(String location, InputStream input)
			throws BundleException;

	/**
	 * Returns the bundle with the specified identifier.
	 *
	 * @param id The identifier of the bundle to retrieve.
	 * @return A <code>Bundle</code> object or <code>null</code> if the
	 *         identifier does not match any installed bundle.
	 */
	public Bundle getBundle(long id);

	/**
	 * Returns a list of all bundles installed at the time of the call. Since
	 * the Framework is very dynamic, bundles can be installed or uninstalled
	 * at any time afterwards.
	 *
	 * @return An array of <code>Bundle</code> objects, one object per
	 *         installed bundle.
	 */
	public Bundle[] getBundles();

	/**
	 * Adds the specified <code>ServiceListener</code> object with the
	 * specified <code>filter</code> to the context bundle's list of
	 * listeners. See {@link Filter} for the filter syntax.
	 * <code>ServiceListener</code> objects are notified when a service has a
	 * lifecycle state change.
	 *
	 * <p>
	 * If the listener is already present (<code>l==listener</code>), its
	 * filter is replaced with the specified one (either may be
	 * <code>null</code>; a <code>null</code> filter matches all services).
	 * To filter on the service class, reference the
	 * {@link Constants#OBJECTCLASS} property.
	 *
	 * <p>
	 * When using a filter, events for the complete lifecycle of a service may
	 * not all be delivered: a service registered while the filter does not
	 * match produces no <code>REGISTERED</code> event; a later property
	 * change that makes the filter match produces a <code>MODIFIED</code>
	 * event instead.
	 *
	 * <p>
	 * If the Java Runtime Environment supports permissions, the listener is
	 * notified only if the registering bundle has the
	 * <code>ServicePermission</code> to get the service using at least one of
	 * the named classes the service was registered under.
	 *
	 * @param listener The <code>ServiceListener</code> object to be added.
	 * @param filter The filter criteria.
	 * @throws InvalidSyntaxException If <code>filter</code> contains an
	 *         invalid filter string that cannot be parsed.
	 * @throws java.lang.IllegalStateException If this BundleContext is no
	 *         longer valid.
	 * @see ServiceEvent
	 * @see ServiceListener
	 * @see ServicePermission
	 */
	public void addServiceListener(ServiceListener listener, String filter)
			throws InvalidSyntaxException;

	/**
	 * Adds the specified <code>ServiceListener</code> object to the context
	 * bundle's list of listeners. Equivalent to
	 * <code>addServiceListener(listener, null)</code>.
	 *
	 * @param listener The <code>ServiceListener</code> object to be added.
	 * @throws java.lang.IllegalStateException If this BundleContext is no
	 *         longer valid.
	 * @see #addServiceListener(ServiceListener, String)
	 */
	public void addServiceListener(ServiceListener listener);

	/**
	 * Removes the specified <code>ServiceListener</code> object from the
	 * context bundle's list of listeners. Does nothing if the listener is not
	 * present.
	 *
	 * @param listener The <code>ServiceListener</code> to be removed.
	 * @throws java.lang.IllegalStateException If this BundleContext is no
	 *         longer valid.
	 */
	public void removeServiceListener(ServiceListener listener);

	/**
	 * Adds the specified <code>BundleListener</code> object to the context
	 * bundle's list of listeners if not already present
	 * (<code>l==listener</code>). BundleListener objects are notified when a
	 * bundle has a lifecycle state change.
	 *
	 * @param listener The <code>BundleListener</code> to be added.
	 * @throws java.lang.IllegalStateException If this BundleContext is no
	 *         longer valid.
	 * @throws java.lang.SecurityException If listener is a
	 *         <code>SynchronousBundleListener</code> and the caller does not
	 *         have the appropriate
	 *         <code>AdminPermission[context bundle,LISTENER]</code>, and the
	 *         Java Runtime Environment supports permissions.
	 * @see BundleEvent
	 * @see BundleListener
	 */
	public void addBundleListener(BundleListener listener);

	/**
	 * Removes the specified <code>BundleListener</code> object from the
	 * context bundle's list of listeners. Does nothing if the listener is not
	 * present.
	 *
	 * @param listener The <code>BundleListener</code> object to be removed.
	 * @throws java.lang.IllegalStateException If this BundleContext is no
	 *         longer valid.
	 * @throws java.lang.SecurityException If listener is a
	 *         <code>SynchronousBundleListener</code> and the caller does not
	 *         have the appropriate
	 *         <code>AdminPermission[context bundle,LISTENER]</code>, and the
	 *         Java Runtime Environment supports permissions.
	 */
	public void removeBundleListener(BundleListener listener);

	/**
	 * Adds the specified <code>FrameworkListener</code> object to the context
	 * bundle's list of listeners if not already present
	 * (<code>l==listener</code>). FrameworkListeners are notified of general
	 * Framework events.
	 *
	 * @param listener The <code>FrameworkListener</code> object to be added.
	 * @throws java.lang.IllegalStateException If this BundleContext is no
	 *         longer valid.
	 * @see FrameworkEvent
	 * @see FrameworkListener
	 */
	public void addFrameworkListener(FrameworkListener listener);

	/**
	 * Removes the specified <code>FrameworkListener</code> object from the
	 * context bundle's list of listeners. Does nothing if the listener is not
	 * present.
	 *
	 * @param listener The <code>FrameworkListener</code> object to be
	 *        removed.
	 * @throws java.lang.IllegalStateException If this BundleContext is no
	 *         longer valid.
	 */
	public void removeFrameworkListener(FrameworkListener listener);

	/**
	 * Registers the specified service object with the specified properties
	 * under the specified class names into the Framework. The returned
	 * <code>ServiceRegistration</code> object is for the private use of the
	 * registering (context) bundle and should not be shared with other
	 * bundles; other bundles can locate the service via
	 * {@link #getServiceReferences} or {@link #getServiceReference}.
	 *
	 * <p>
	 * A bundle can register a service object that implements the
	 * {@link ServiceFactory} interface to have more flexibility in providing
	 * service objects to other bundles.
	 *
	 * <p>
	 * The Framework adds the {@link Constants#SERVICE_ID} and
	 * {@link Constants#OBJECTCLASS} properties to the specified
	 * <code>Dictionary</code> (overwriting any values the registering bundle
	 * specified for them), adds the service to the service registry, and
	 * fires a {@link ServiceEvent#REGISTERED} event.
	 *
	 * @param clazzes The class names under which the service can be located.
	 *        The class names in this array will be stored in the service's
	 *        properties under the key {@link Constants#OBJECTCLASS}.
	 * @param service The service object or a <code>ServiceFactory</code>
	 *        object.
	 * @param properties The properties for this service. All keys must be
	 *        <code>String</code> objects; see {@link Constants} for standard
	 *        service property keys. Changes should not be made to this object
	 *        after calling this method; to update the service's properties
	 *        the {@link ServiceRegistration#setProperties} method must be
	 *        called. May be <code>null</code> if the service has no
	 *        properties.
	 * @return A <code>ServiceRegistration</code> object for use by the bundle
	 *         registering the service to update the service's properties or
	 *         to unregister the service.
	 * @throws java.lang.IllegalArgumentException If <code>service</code> is
	 *         <code>null</code>, is not a <code>ServiceFactory</code> object
	 *         and is not an instance of all the named classes in
	 *         <code>clazzes</code>, or if <code>properties</code> contains
	 *         case variants of the same key name.
	 * @throws java.lang.SecurityException If the caller does not have the
	 *         <code>ServicePermission</code> to register the service for all
	 *         the named classes and the Java Runtime Environment supports
	 *         permissions.
	 * @throws java.lang.IllegalStateException If this BundleContext is no
	 *         longer valid.
	 * @see ServiceRegistration
	 * @see ServiceFactory
	 */
	public ServiceRegistration registerService(String[] clazzes,
			Object service, Dictionary properties);

	/**
	 * Registers the specified service object with the specified properties
	 * under the specified class name with the Framework.
	 *
	 * <p>
	 * Otherwise identical to
	 * {@link #registerService(java.lang.String[], java.lang.Object,
	 * java.util.Dictionary)}; provided as a convenience for a single class
	 * name. Note that even in this case the value of the service's
	 * {@link Constants#OBJECTCLASS} property will be an array of strings.
	 *
	 * @param clazz The class name under which the service can be located.
	 * @param service The service object or a <code>ServiceFactory</code>
	 *        object.
	 * @param properties The properties for this service.
	 * @return A <code>ServiceRegistration</code> object for use by the bundle
	 *         registering the service to update the service's properties or
	 *         to unregister the service.
	 * @throws java.lang.IllegalStateException If this BundleContext is no
	 *         longer valid.
	 * @see #registerService(java.lang.String[], java.lang.Object,
	 *      java.util.Dictionary)
	 */
	public ServiceRegistration registerService(String clazz, Object service,
			Dictionary properties);

	/**
	 * Returns an array of <code>ServiceReference</code> objects for services
	 * that were registered under the specified class, match the specified
	 * filter criteria, and whose registered packages are compatible with the
	 * context bundle as defined by
	 * {@link ServiceReference#isAssignableTo(Bundle, String)}.
	 *
	 * <p>
	 * The list is valid only at the time of the call; services can be
	 * modified or unregistered at any time. See {@link Filter} for the filter
	 * string syntax. A <code>null</code> <code>filter</code> matches all
	 * registered services; an unparsable filter causes an
	 * {@link InvalidSyntaxException} with a human readable message where the
	 * filter became unparsable. The result is further reduced by
	 * <code>ServicePermission</code> checks when the Java Runtime Environment
	 * supports permissions.
	 *
	 * @param clazz The class name with which the service was registered or
	 *        <code>null</code> for all services.
	 * @param filter The filter criteria.
	 * @return An array of <code>ServiceReference</code> objects or
	 *         <code>null</code> if no services are registered which satisfy
	 *         the search.
	 * @throws InvalidSyntaxException If <code>filter</code> contains an
	 *         invalid filter string that cannot be parsed.
	 * @throws java.lang.IllegalStateException If this BundleContext is no
	 *         longer valid.
	 */
	public ServiceReference[] getServiceReferences(String clazz, String filter)
			throws InvalidSyntaxException;

	/**
	 * Returns an array of <code>ServiceReference</code> objects for services
	 * that were registered under the specified class and match the specified
	 * filter criteria.
	 *
	 * <p>
	 * Identical to {@link #getServiceReferences(String, String)} except that
	 * the result is NOT reduced by the package-compatibility check of
	 * {@link ServiceReference#isAssignableTo(Bundle, String)}. The list is
	 * valid only at the time of the call; services can be modified or
	 * unregistered at any time.
	 *
	 * @param clazz The class name with which the service was registered or
	 *        <code>null</code> for all services.
	 * @param filter The filter criteria.
	 * @return An array of <code>ServiceReference</code> objects or
	 *         <code>null</code> if no services are registered which satisfy
	 *         the search.
	 * @throws InvalidSyntaxException If <code>filter</code> contains an
	 *         invalid filter string that cannot be parsed.
	 * @throws java.lang.IllegalStateException If this BundleContext is no
	 *         longer valid.
	 * @since 1.3
	 */
	public ServiceReference[] getAllServiceReferences(String clazz,
			String filter) throws InvalidSyntaxException;

	/**
	 * Returns a <code>ServiceReference</code> object for a service that
	 * implements and was registered under the specified class. Equivalent to
	 * {@link #getServiceReferences(String, String)} with a <code>null</code>
	 * filter string.
	 *
	 * <p>
	 * If multiple such services exist, the service with the highest ranking
	 * ({@link Constants#SERVICE_RANKING} property) is returned; ties are
	 * broken by the lowest service ID ({@link Constants#SERVICE_ID}
	 * property), i.e. the service registered first.
	 *
	 * @param clazz The class name with which the service was registered.
	 * @return A <code>ServiceReference</code> object, or <code>null</code>
	 *         if no services are registered which implement the named class.
	 * @throws java.lang.IllegalStateException If this BundleContext is no
	 *         longer valid.
	 * @see #getServiceReferences(String, String)
	 */
	public ServiceReference getServiceReference(String clazz);

	/**
	 * Returns the specified service object for a service.
	 *
	 * <p>
	 * Each call increments the context bundle's use count for the service by
	 * one; each {@link #ungetService(ServiceReference)} decrements it. When a
	 * bundle's use count for a service drops to zero, the bundle should no
	 * longer use that service.
	 *
	 * <p>
	 * Returns <code>null</code> if the service has been unregistered. If the
	 * use count becomes one and the service was registered with a
	 * <code>ServiceFactory</code>,
	 * {@link ServiceFactory#getService(Bundle, ServiceRegistration)} is
	 * called to create a service object for the context bundle, which the
	 * Framework caches for subsequent calls. If the factory's object is not
	 * an instance of all the registered classes or the factory throws an
	 * exception, <code>null</code> is returned and a
	 * {@link FrameworkEvent#ERROR} event is fired.
	 *
	 * @param reference A reference to the service.
	 * @return A service object for the service associated with
	 *         <code>reference</code> or <code>null</code> if the service is
	 *         not registered or does not implement the classes under which it
	 *         was registered in the case of a <code>ServiceFactory</code>.
	 * @throws java.lang.SecurityException If the caller does not have the
	 *         <code>ServicePermission</code> to get the service using at
	 *         least one of the named classes the service was registered under
	 *         and the Java Runtime Environment supports permissions.
	 * @throws java.lang.IllegalStateException If this BundleContext is no
	 *         longer valid.
	 * @see #ungetService(ServiceReference)
	 * @see ServiceFactory
	 */
	public Object getService(ServiceReference reference);

	/**
	 * Releases the service object referenced by the specified
	 * <code>ServiceReference</code> object.
	 *
	 * <p>
	 * If the context bundle's use count for the service is zero or the
	 * service has been unregistered, <code>false</code> is returned.
	 * Otherwise the use count is decremented by one; if it reaches zero and
	 * the service was registered with a <code>ServiceFactory</code>,
	 * {@link ServiceFactory#ungetService(Bundle, ServiceRegistration, Object)}
	 * is called to release the service object, and <code>true</code> is
	 * returned. The service object should no longer be used once the use
	 * count drops to zero.
	 *
	 * @param reference A reference to the service to be released.
	 * @return <code>false</code> if the context bundle's use count for the
	 *         service is zero or if the service has been unregistered;
	 *         <code>true</code> otherwise.
	 * @throws java.lang.IllegalStateException If this BundleContext is no
	 *         longer valid.
	 * @see #getService
	 * @see ServiceFactory
	 */
	public boolean ungetService(ServiceReference reference);

	/**
	 * Creates a <code>File</code> object for a file in the persistent storage
	 * area provided for the bundle by the Framework. Returns
	 * <code>null</code> if the platform does not have file system support.
	 *
	 * <p>
	 * Passing an empty string returns the base directory of the persistent
	 * storage area. If the Java Runtime Environment supports permissions, the
	 * Framework ensures the bundle has <code>java.io.FilePermission</code>
	 * with actions <code>read</code>, <code>write</code>,
	 * <code>delete</code> for all files (recursively) in that area.
	 *
	 * @param filename A relative name to the file to be accessed.
	 * @return A <code>File</code> object that represents the requested file
	 *         or <code>null</code> if the platform does not have file system
	 *         support.
	 * @throws java.lang.IllegalStateException If this BundleContext is no
	 *         longer valid.
	 */
	public File getDataFile(String filename);

	/**
	 * Creates a <code>Filter</code> object that may be used to match a
	 * <code>ServiceReference</code> object or a <code>Dictionary</code>
	 * object.
	 *
	 * <p>
	 * If the filter cannot be parsed, an {@link InvalidSyntaxException} will
	 * be thrown with a human readable message where the filter became
	 * unparsable.
	 *
	 * @param filter The filter string.
	 * @return A <code>Filter</code> object encapsulating the filter string.
	 * @throws InvalidSyntaxException If <code>filter</code> contains an
	 *         invalid filter string that cannot be parsed.
	 * @throws NullPointerException If <code>filter</code> is null.
	 * @throws java.lang.IllegalStateException If this BundleContext is no
	 *         longer valid.
	 * @since 1.1
	 * @see "Framework specification for a description of the filter string syntax."
	 * @see FrameworkUtil#createFilter(String)
	 */
	public Filter createFilter(String filter) throws InvalidSyntaxException;
}
apache/pinot
36,763
pinot-segment-spi/src/main/java/org/apache/pinot/segment/spi/index/reader/ForwardIndexReader.java
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.pinot.segment.spi.index.reader;

import java.math.BigDecimal;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import javax.annotation.Nullable;
import org.apache.pinot.segment.spi.compression.ChunkCompressionType;
import org.apache.pinot.segment.spi.compression.DictIdCompressionType;
import org.apache.pinot.segment.spi.index.IndexReader;
import org.apache.pinot.spi.data.FieldSpec.DataType;
import org.apache.pinot.spi.utils.BigDecimalUtils;
import org.apache.pinot.spi.utils.BytesUtils;
import org.apache.pinot.spi.utils.MapUtils;
import org.apache.pinot.spi.utils.hash.MurmurHashFunctions;


/**
 * Interface for forward index reader.
 *
 * @param <T> Type of the ReaderContext
 */
public interface ForwardIndexReader<T extends ForwardIndexReaderContext> extends IndexReader {

  /**
   * Returns {@code true} if the forward index is dictionary-encoded, {@code false} if it is raw.
   */
  boolean isDictionaryEncoded();

  /**
   * Returns {@code true} if the forward index is for a single-value column, {@code false} if it is for a multi-value
   * column.
   */
  boolean isSingleValue();

  /**
   * Returns the data type of the values in the forward index. Returns {@link DataType#INT} for dictionary-encoded
   * forward index.
   */
  DataType getStoredType();

  /**
   * Returns the compression type (if valid). Only valid for RAW forward index columns implemented in
   * BaseChunkForwardIndexReader.
   */
  @Nullable
  default ChunkCompressionType getCompressionType() {
    return null;
  }

  /**
   * Returns the compression type for dictionary encoded forward index.
   */
  @Nullable
  default DictIdCompressionType getDictIdCompressionType() {
    return null;
  }

  /**
   * Returns the length of the longest entry. Only valid for RAW forward index columns implemented in
   * BaseChunkForwardIndexReader. Returns -1 otherwise.
   *
   * @return length of the longest entry, or {@code -1} when not applicable for this reader
   */
  default int getLengthOfLongestEntry() {
    return -1;
  }

  /**
   * Creates a new {@link ForwardIndexReaderContext} of the reader which can be used to accelerate the reads.
   * NOTE: Caller is responsible for closing the returned reader context.
   */
  @Nullable
  default T createContext() {
    return null;
  }

  /**
   * DICTIONARY-ENCODED INDEX APIs
   */

  /**
   * Reads the dictionary id for a single-value column at the given document id.
   *
   * @param docId Document id
   * @param context Reader context
   * @return Dictionary id at the given document id
   */
  default int getDictId(int docId, T context) {
    throw new UnsupportedOperationException();
  }

  /**
   * Batch reads multiple dictionary ids for a single-value column at the given document ids into the passed in buffer
   * (the buffer size must be larger than or equal to the length).
   *
   * @param docIds Array containing the document ids to read
   * @param length Number of values to read
   * @param dictIdBuffer Dictionary id buffer
   * @param context Reader context
   */
  default void readDictIds(int[] docIds, int length, int[] dictIdBuffer, T context) {
    throw new UnsupportedOperationException();
  }

  /**
   * Reads the dictionary ids for a multi-value column at the given document id into the passed in buffer (the buffer
   * size must be enough to hold all the values for the multi-value entry) and returns the number of values within the
   * multi-value entry.
   *
   * @param docId Document id
   * @param dictIdBuffer Dictionary id buffer
   * @param context Reader context
   * @return Number of values within the multi-value entry
   */
  default int getDictIdMV(int docId, int[] dictIdBuffer, T context) {
    throw new UnsupportedOperationException();
  }

  /**
   * Reads the dictionary ids for a multi-value column at the given document id.
   *
   * @param docId Document id
   * @param context Reader context
   * @return Dictionary ids at the given document id
   */
  default int[] getDictIdMV(int docId, T context) {
    throw new UnsupportedOperationException();
  }

  /**
   * SINGLE-VALUE COLUMN RAW INDEX APIs
   */

  /**
   * Fills the values
   * @param docIds Array containing the document ids to read
   * @param length Number of values to read
   * @param values Values to fill
   * @param context Reader context
   */
  default void readValuesSV(int[] docIds, int length, int[] values, T context) {
    switch (getStoredType()) {
      case INT:
        for (int i = 0; i < length; i++) {
          values[i] = getInt(docIds[i], context);
        }
        break;
      case LONG:
        for (int i = 0; i < length; i++) {
          values[i] = (int) getLong(docIds[i], context);
        }
        break;
      case FLOAT:
        for (int i = 0; i < length; i++) {
          values[i] = (int) getFloat(docIds[i], context);
        }
        break;
      case DOUBLE:
        for (int i = 0; i < length; i++) {
          values[i] = (int) getDouble(docIds[i], context);
        }
        break;
      case BIG_DECIMAL:
        for (int i = 0; i < length; i++) {
          values[i] = getBigDecimal(docIds[i], context).intValue();
        }
        break;
      case STRING:
        for (int i = 0; i < length; i++) {
          values[i] = Integer.parseInt(getString(docIds[i], context));
        }
        break;
      default:
        // Message added for consistency with the readValuesMV variants.
        throw new IllegalArgumentException("readValuesSV not supported for type " + getStoredType());
    }
  }

  /**
   * Fills the values
   * @param docIds Array containing the document ids to read
   * @param length Number of values to read
   * @param values Values to fill
   * @param context Reader context
   */
  default void readValuesSV(int[] docIds, int length, long[] values, T context) {
    switch (getStoredType()) {
      case INT:
        for (int i = 0; i < length; i++) {
          values[i] = getInt(docIds[i], context);
        }
        break;
      case LONG:
        for (int i = 0; i < length; i++) {
          values[i] = getLong(docIds[i], context);
        }
        break;
      case FLOAT:
        for (int i = 0; i < length; i++) {
          values[i] = (long) getFloat(docIds[i], context);
        }
        break;
      case DOUBLE:
        for (int i = 0; i < length; i++) {
          values[i] = (long) getDouble(docIds[i], context);
        }
        break;
      case BIG_DECIMAL:
        for (int i = 0; i < length; i++) {
          values[i] = getBigDecimal(docIds[i], context).longValue();
        }
        break;
      case STRING:
        for (int i = 0; i < length; i++) {
          values[i] = Long.parseLong(getString(docIds[i], context));
        }
        break;
      default:
        throw new IllegalArgumentException("readValuesSV not supported for type " + getStoredType());
    }
  }

  /**
   * Fills the values
   * @param docIds Array containing the document ids to read
   * @param length Number of values to read
   * @param values Values to fill
   * @param context Reader context
   */
  default void readValuesSV(int[] docIds, int length, float[] values, T context) {
    switch (getStoredType()) {
      case INT:
        for (int i = 0; i < length; i++) {
          values[i] = getInt(docIds[i], context);
        }
        break;
      case LONG:
        for (int i = 0; i < length; i++) {
          values[i] = getLong(docIds[i], context);
        }
        break;
      case FLOAT:
        for (int i = 0; i < length; i++) {
          values[i] = getFloat(docIds[i], context);
        }
        break;
      case DOUBLE:
        for (int i = 0; i < length; i++) {
          values[i] = (float) getDouble(docIds[i], context);
        }
        break;
      case BIG_DECIMAL:
        for (int i = 0; i < length; i++) {
          values[i] = getBigDecimal(docIds[i], context).floatValue();
        }
        break;
      case STRING:
        for (int i = 0; i < length; i++) {
          values[i] = Float.parseFloat(getString(docIds[i], context));
        }
        break;
      default:
        throw new IllegalArgumentException("readValuesSV not supported for type " + getStoredType());
    }
  }

  /**
   * Fills the values
   * @param docIds Array containing the document ids to read
   * @param length Number of values to read
   * @param values Values to fill
   * @param context Reader context
   */
  default void readValuesSV(int[] docIds, int length, double[] values, T context) {
    switch (getStoredType()) {
      case INT:
        for (int i = 0; i < length; i++) {
          values[i] = getInt(docIds[i], context);
        }
        break;
      case LONG:
        for (int i = 0; i < length; i++) {
          values[i] = getLong(docIds[i], context);
        }
        break;
      case FLOAT:
        for (int i = 0; i < length; i++) {
          values[i] = getFloat(docIds[i], context);
        }
        break;
      case DOUBLE:
        for (int i = 0; i < length; i++) {
          values[i] = getDouble(docIds[i], context);
        }
        break;
      case BIG_DECIMAL:
        for (int i = 0; i < length; i++) {
          values[i] = getBigDecimal(docIds[i], context).doubleValue();
        }
        break;
      case STRING:
        for (int i = 0; i < length; i++) {
          values[i] = Double.parseDouble(getString(docIds[i], context));
        }
        break;
      default:
        throw new IllegalArgumentException("readValuesSV not supported for type " + getStoredType());
    }
  }

  /**
   * Fills the values
   * @param docIds Array containing the document ids to read
   * @param length Number of values to read
   * @param values Values to fill
   * @param context Reader context
   */
  default void readValuesSV(int[] docIds, int length, BigDecimal[] values, T context) {
    // todo(nhejazi): add raw index support to the BIG_DECIMAL type. In most of the cases, it will be more efficient
    //  to store big decimal as raw.
    switch (getStoredType()) {
      case INT:
        for (int i = 0; i < length; i++) {
          values[i] = BigDecimal.valueOf(getInt(docIds[i], context));
        }
        break;
      case LONG:
        for (int i = 0; i < length; i++) {
          values[i] = BigDecimal.valueOf(getLong(docIds[i], context));
        }
        break;
      case FLOAT:
        for (int i = 0; i < length; i++) {
          values[i] = BigDecimal.valueOf(getFloat(docIds[i], context));
        }
        break;
      case DOUBLE:
        for (int i = 0; i < length; i++) {
          values[i] = BigDecimal.valueOf(getDouble(docIds[i], context));
        }
        break;
      case BIG_DECIMAL:
        for (int i = 0; i < length; i++) {
          values[i] = getBigDecimal(docIds[i], context);
        }
        break;
      case STRING:
        for (int i = 0; i < length; i++) {
          values[i] = new BigDecimal(getString(docIds[i], context));
        }
        break;
      case BYTES:
        for (int i = 0; i < length; i++) {
          values[i] = BigDecimalUtils.deserialize(getBytes(docIds[i], context));
        }
        break;
      default:
        throw new IllegalArgumentException("readValuesSV not supported for type " + getStoredType());
    }
  }

  /**
   * Fills the values
   * @param docIds Array containing the document ids to read
   * @param length Number of values to read
   * @param values Values to fill
   * @param context Reader context
   */
  default void readValuesSV(int[] docIds, int length, String[] values, T context) {
    switch (getStoredType()) {
      case INT:
        for (int i = 0; i < length; i++) {
          values[i] = Integer.toString(getInt(docIds[i], context));
        }
        break;
      case LONG:
        for (int i = 0; i < length; i++) {
          values[i] = Long.toString(getLong(docIds[i], context));
        }
        break;
      case FLOAT:
        for (int i = 0; i < length; i++) {
          values[i] = Float.toString(getFloat(docIds[i], context));
        }
        break;
      case DOUBLE:
        for (int i = 0; i < length; i++) {
          values[i] = Double.toString(getDouble(docIds[i], context));
        }
        break;
      case BIG_DECIMAL:
        for (int i = 0; i < length; i++) {
          values[i] = getBigDecimal(docIds[i], context).toPlainString();
        }
        break;
      case STRING:
        for (int i = 0; i < length; i++) {
          values[i] = getString(docIds[i], context);
        }
        break;
      case BYTES:
        for (int i = 0; i < length; i++) {
          values[i] = BytesUtils.toHexString(getBytes(docIds[i], context));
        }
        break;
      case MAP:
        for (int i = 0; i < length; i++) {
          values[i] = MapUtils.toString(getMap(docIds[i], context));
        }
        break;
      default:
        // NOTE: Kept as IllegalStateException (unlike the sibling overloads) to preserve the exception type
        //       callers may rely on; message added for debuggability.
        throw new IllegalStateException("readValuesSV not supported for type " + getStoredType());
    }
  }

  /**
   * Reads the INT value at the given document id.
   *
   * @param docId Document id
   * @param context Reader context
   * @return INT type single-value at the given document id
   */
  default int getInt(int docId, T context) {
    throw new UnsupportedOperationException();
  }

  /**
   * Reads the LONG type single-value at the given document id.
   *
   * @param docId Document id
   * @param context Reader context
   * @return LONG type single-value at the given document id
   */
  default long getLong(int docId, T context) {
    throw new UnsupportedOperationException();
  }

  /**
   * Reads the FLOAT type single-value at the given document id.
   *
   * @param docId Document id
   * @param context Reader context
   * @return FLOAT type single-value at the given document id
   */
  default float getFloat(int docId, T context) {
    throw new UnsupportedOperationException();
  }

  /**
   * Reads the DOUBLE type single-value at the given document id.
   *
   * @param docId Document id
   * @param context Reader context
   * @return DOUBLE type single-value at the given document id
   */
  default double getDouble(int docId, T context) {
    throw new UnsupportedOperationException();
  }

  /**
   * Reads the BIG_DECIMAL type single-value at the given document id.
   *
   * @param docId Document id
   * @param context Reader context
   * @return BIG_DECIMAL type single-value at the given document id
   */
  default BigDecimal getBigDecimal(int docId, T context) {
    throw new UnsupportedOperationException();
  }

  /**
   * Reads the STRING type single-value at the given document id.
   *
   * @param docId Document id
   * @param context Reader context
   * @return STRING type single-value at the given document id
   */
  default String getString(int docId, T context) {
    throw new UnsupportedOperationException();
  }

  /**
   * Reads the BYTES type single-value at the given document id.
   *
   * @param docId Document id
   * @param context Reader context
   * @return BYTES type single-value at the given document id
   */
  default byte[] getBytes(int docId, T context) {
    throw new UnsupportedOperationException();
  }

  /**
   * Reads the MAP type single-value at the given document id.
   *
   * @param docId Document id
   * @param context Reader context
   * @return MAP type single-value at the given document id
   */
  default Map<String, Object> getMap(int docId, T context) {
    throw new UnsupportedOperationException("This ForwardIndexReader does not support MAP types. "
        + "This indicates that either the column is getting mistyped or the wrong "
        + "ForwardIndexReader is being created to read this column.");
  }

  // Murmur3 hash helpers over the raw BYTES value; seed is fixed to 0.
  default int get32BitsMurmur3Hash(int docId, T context) {
    return MurmurHashFunctions.murmurHash3X64Bit32(getBytes(docId, context), 0);
  }

  default long get64BitsMurmur3Hash(int docId, T context) {
    return MurmurHashFunctions.murmurHash3X64Bit64(getBytes(docId, context), 0);
  }

  default long[] get128BitsMurmur3Hash(int docId, T context) {
    return MurmurHashFunctions.murmurHash3X64Bit128AsLongs(getBytes(docId, context), 0);
  }

  /**
   * MULTI-VALUE COLUMN RAW INDEX APIs
   */

  /**
   * Fills the values
   * @param docIds Array containing the document ids to read
   * @param length Number of values to read
   * @param maxNumValuesPerMVEntry Maximum number of values per MV entry
   * @param values Values to fill
   * @param context Reader context
   */
  default void readValuesMV(int[] docIds, int length, int maxNumValuesPerMVEntry, int[][] values, T context) {
    switch (getStoredType()) {
      case INT:
        for (int i = 0; i < length; i++) {
          values[i] = getIntMV(docIds[i], context);
        }
        break;
      case LONG:
        long[] longValueBuffer = new long[maxNumValuesPerMVEntry];
        for (int i = 0; i < length; i++) {
          int numValues = getLongMV(docIds[i], longValueBuffer, context);
          values[i] = new int[numValues];
          for (int j = 0; j < numValues; j++) {
            values[i][j] = (int) longValueBuffer[j];
          }
        }
        break;
      case FLOAT:
        float[] floatValueBuffer = new float[maxNumValuesPerMVEntry];
        for (int i = 0; i < length; i++) {
          int numValues = getFloatMV(docIds[i], floatValueBuffer, context);
          values[i] = new int[numValues];
          for (int j = 0; j < numValues; j++) {
            values[i][j] = (int) floatValueBuffer[j];
          }
        }
        break;
      case DOUBLE:
        double[] doubleValueBuffer = new double[maxNumValuesPerMVEntry];
        for (int i = 0; i < length; i++) {
          int numValues = getDoubleMV(docIds[i], doubleValueBuffer, context);
          values[i] = new int[numValues];
          for (int j = 0; j < numValues; j++) {
            values[i][j] = (int) doubleValueBuffer[j];
          }
        }
        break;
      case STRING:
        String[] stringValueBuffer = new String[maxNumValuesPerMVEntry];
        for (int i = 0; i < length; i++) {
          int numValues = getStringMV(docIds[i], stringValueBuffer, context);
          values[i] = new int[numValues];
          for (int j = 0; j < numValues; j++) {
            values[i][j] = Integer.parseInt(stringValueBuffer[j]);
          }
        }
        break;
      default:
        throw new IllegalArgumentException("readValuesMV not supported for type " + getStoredType());
    }
  }

  /**
   * Fills the values
   * @param docIds Array containing the document ids to read
   * @param length Number of values to read
   * @param maxNumValuesPerMVEntry Maximum number of values per MV entry
   * @param values Values to fill
   * @param context Reader context
   */
  default void readValuesMV(int[] docIds, int length, int maxNumValuesPerMVEntry, long[][] values, T context) {
    switch (getStoredType()) {
      case INT:
        int[] intValueBuffer = new int[maxNumValuesPerMVEntry];
        for (int i = 0; i < length; i++) {
          int numValues = getIntMV(docIds[i], intValueBuffer, context);
          values[i] = new long[numValues];
          for (int j = 0; j < numValues; j++) {
            values[i][j] = intValueBuffer[j];
          }
        }
        break;
      case LONG:
        for (int i = 0; i < length; i++) {
          values[i] = getLongMV(docIds[i], context);
        }
        break;
      case FLOAT:
        float[] floatValueBuffer = new float[maxNumValuesPerMVEntry];
        for (int i = 0; i < length; i++) {
          int numValues = getFloatMV(docIds[i], floatValueBuffer, context);
          values[i] = new long[numValues];
          for (int j = 0; j < numValues; j++) {
            values[i][j] = (long) floatValueBuffer[j];
          }
        }
        break;
      case DOUBLE:
        double[] doubleValueBuffer = new double[maxNumValuesPerMVEntry];
        for (int i = 0; i < length; i++) {
          int numValues = getDoubleMV(docIds[i], doubleValueBuffer, context);
          values[i] = new long[numValues];
          for (int j = 0; j < numValues; j++) {
            values[i][j] = (long) doubleValueBuffer[j];
          }
        }
        break;
      case STRING:
        String[] stringValueBuffer = new String[maxNumValuesPerMVEntry];
        for (int i = 0; i < length; i++) {
          int numValues = getStringMV(docIds[i], stringValueBuffer, context);
          values[i] = new long[numValues];
          for (int j = 0; j < numValues; j++) {
            values[i][j] = Long.parseLong(stringValueBuffer[j]);
          }
        }
        break;
      default:
        throw new IllegalArgumentException("readValuesMV not supported for type " + getStoredType());
    }
  }

  /**
   * Fills the values
   * @param docIds Array containing the document ids to read
   * @param length Number of values to read
   * @param maxNumValuesPerMVEntry Maximum number of values per MV entry
   * @param values Values to fill
   * @param context Reader context
   */
  default void readValuesMV(int[] docIds, int length, int maxNumValuesPerMVEntry, float[][] values, T context) {
    switch (getStoredType()) {
      case INT:
        int[] intValueBuffer = new int[maxNumValuesPerMVEntry];
        for (int i = 0; i < length; i++) {
          int numValues = getIntMV(docIds[i], intValueBuffer, context);
          values[i] = new float[numValues];
          for (int j = 0; j < numValues; j++) {
            values[i][j] = intValueBuffer[j];
          }
        }
        break;
      case LONG:
        long[] longValueBuffer = new long[maxNumValuesPerMVEntry];
        for (int i = 0; i < length; i++) {
          int numValues = getLongMV(docIds[i], longValueBuffer, context);
          values[i] = new float[numValues];
          for (int j = 0; j < numValues; j++) {
            values[i][j] = longValueBuffer[j];
          }
        }
        break;
      case FLOAT:
        for (int i = 0; i < length; i++) {
          values[i] = getFloatMV(docIds[i], context);
        }
        break;
      case DOUBLE:
        double[] doubleValueBuffer = new double[maxNumValuesPerMVEntry];
        for (int i = 0; i < length; i++) {
          int numValues = getDoubleMV(docIds[i], doubleValueBuffer, context);
          values[i] = new float[numValues];
          for (int j = 0; j < numValues; j++) {
            values[i][j] = (float) doubleValueBuffer[j];
          }
        }
        break;
      case STRING:
        String[] stringValueBuffer = new String[maxNumValuesPerMVEntry];
        for (int i = 0; i < length; i++) {
          int numValues = getStringMV(docIds[i], stringValueBuffer, context);
          values[i] = new float[numValues];
          for (int j = 0; j < numValues; j++) {
            values[i][j] = Float.parseFloat(stringValueBuffer[j]);
          }
        }
        break;
      default:
        throw new IllegalArgumentException("readValuesMV not supported for type " + getStoredType());
    }
  }

  /**
   * Fills the values
   * @param docIds Array containing the document ids to read
   * @param length Number of values to read
   * @param maxNumValuesPerMVEntry Maximum number of values per MV entry
   * @param values Values to fill
   * @param context Reader context
   */
  default void readValuesMV(int[] docIds, int length, int maxNumValuesPerMVEntry, double[][] values, T context) {
    switch (getStoredType()) {
      case INT:
        int[] intValueBuffer = new int[maxNumValuesPerMVEntry];
        for (int i = 0; i < length; i++) {
          int numValues = getIntMV(docIds[i], intValueBuffer, context);
          values[i] = new double[numValues];
          for (int j = 0; j < numValues; j++) {
            values[i][j] = intValueBuffer[j];
          }
        }
        break;
      case LONG:
        long[] longValueBuffer = new long[maxNumValuesPerMVEntry];
        for (int i = 0; i < length; i++) {
          int numValues = getLongMV(docIds[i], longValueBuffer, context);
          values[i] = new double[numValues];
          for (int j = 0; j < numValues; j++) {
            values[i][j] = longValueBuffer[j];
          }
        }
        break;
      case FLOAT:
        float[] floatValueBuffer = new float[maxNumValuesPerMVEntry];
        for (int i = 0; i < length; i++) {
          int numValues = getFloatMV(docIds[i], floatValueBuffer, context);
          values[i] = new double[numValues];
          for (int j = 0; j < numValues; j++) {
            values[i][j] = floatValueBuffer[j];
          }
        }
        break;
      case DOUBLE:
        for (int i = 0; i < length; i++) {
          values[i] = getDoubleMV(docIds[i], context);
        }
        break;
      case STRING:
        String[] stringValueBuffer = new String[maxNumValuesPerMVEntry];
        for (int i = 0; i < length; i++) {
          int numValues = getStringMV(docIds[i], stringValueBuffer, context);
          values[i] = new double[numValues];
          for (int j = 0; j < numValues; j++) {
            values[i][j] = Double.parseDouble(stringValueBuffer[j]);
          }
        }
        break;
      default:
        throw new IllegalArgumentException("readValuesMV not supported for type " + getStoredType());
    }
  }

  /**
   * Fills the values
   * @param docIds Array containing the document ids to read
   * @param length Number of values to read
   * @param maxNumValuesPerMVEntry Maximum number of values per MV entry
   * @param values Values to fill
   * @param context Reader context
   */
  default void readValuesMV(int[] docIds, int length, int maxNumValuesPerMVEntry, String[][] values, T context) {
    switch (getStoredType()) {
      case INT:
        int[] intValueBuffer = new int[maxNumValuesPerMVEntry];
        for (int i = 0; i < length; i++) {
          int numValues = getIntMV(docIds[i], intValueBuffer, context);
          values[i] = new String[numValues];
          for (int j = 0; j < numValues; j++) {
            values[i][j] = String.valueOf(intValueBuffer[j]);
          }
        }
        break;
      case LONG:
        long[] longValueBuffer = new long[maxNumValuesPerMVEntry];
        for (int i = 0; i < length; i++) {
          int numValues = getLongMV(docIds[i], longValueBuffer, context);
          values[i] = new String[numValues];
          for (int j = 0; j < numValues; j++) {
            values[i][j] = String.valueOf(longValueBuffer[j]);
          }
        }
        break;
      case FLOAT:
        float[] floatValueBuffer = new float[maxNumValuesPerMVEntry];
        for (int i = 0; i < length; i++) {
          int numValues = getFloatMV(docIds[i], floatValueBuffer, context);
          values[i] = new String[numValues];
          for (int j = 0; j < numValues; j++) {
            values[i][j] = String.valueOf(floatValueBuffer[j]);
          }
        }
        break;
      case DOUBLE:
        double[] doubleValueBuffer = new double[maxNumValuesPerMVEntry];
        for (int i = 0; i < length; i++) {
          int numValues = getDoubleMV(docIds[i], doubleValueBuffer, context);
          values[i] = new String[numValues];
          for (int j = 0; j < numValues; j++) {
            values[i][j] = String.valueOf(doubleValueBuffer[j]);
          }
        }
        break;
      case STRING:
        for (int i = 0; i < length; i++) {
          values[i] = getStringMV(docIds[i], context);
        }
        break;
      default:
        throw new IllegalArgumentException("readValuesMV not supported for type " + getStoredType());
    }
  }

  /**
   * Fills the values
   * @param docIds Array containing the document ids to read
   * @param length Number of values to read
   * @param maxNumValuesPerMVEntry Maximum number of values per MV entry
   * @param values Values to fill
   * @param context Reader context
   */
  default void readValuesMV(int[] docIds, int length, int maxNumValuesPerMVEntry, byte[][][] values, T context) {
    for (int i = 0; i < length; i++) {
      values[i] = getBytesMV(docIds[i], context);
    }
  }

  /**
   * Reads the INT type multi-value at the given document id into the passed in value buffer (the buffer size must be
   * enough to hold all the values for the multi-value entry) and returns the number of values within the multi-value
   * entry.
   *
   * @param docId Document id
   * @param valueBuffer Value buffer
   * @param context Reader context
   * @return Number of values within the multi-value entry
   */
  default int getIntMV(int docId, int[] valueBuffer, T context) {
    throw new UnsupportedOperationException();
  }

  /**
   * Reads the INT type multi-value at the given document id.
   *
   * @param docId Document id
   * @param context Reader context
   * @return INT values at the given document id
   */
  default int[] getIntMV(int docId, T context) {
    throw new UnsupportedOperationException();
  }

  /**
   * Reads the LONG type multi-value at the given document id into the passed in value buffer (the buffer size must be
   * enough to hold all the values for the multi-value entry) and returns the number of values within the multi-value
   * entry.
   *
   * @param docId Document id
   * @param valueBuffer Value buffer
   * @param context Reader context
   * @return Number of values within the multi-value entry
   */
  default int getLongMV(int docId, long[] valueBuffer, T context) {
    throw new UnsupportedOperationException();
  }

  /**
   * Reads the LONG type multi-value at the given document id.
   *
   * @param docId Document id
   * @param context Reader context
   * @return LONG values at the given document id
   */
  default long[] getLongMV(int docId, T context) {
    throw new UnsupportedOperationException();
  }

  /**
   * Reads the FLOAT type multi-value at the given document id into the passed in value buffer (the buffer size must be
   * enough to hold all the values for the multi-value entry) and returns the number of values within the multi-value
   * entry.
   *
   * @param docId Document id
   * @param valueBuffer Value buffer
   * @param context Reader context
   * @return Number of values within the multi-value entry
   */
  default int getFloatMV(int docId, float[] valueBuffer, T context) {
    throw new UnsupportedOperationException();
  }

  /**
   * Reads the FLOAT type multi-value at the given document id.
   *
   * @param docId Document id
   * @param context Reader context
   * @return FLOAT values at the given document id
   */
  default float[] getFloatMV(int docId, T context) {
    throw new UnsupportedOperationException();
  }

  /**
   * Reads the DOUBLE type multi-value at the given document id into the passed in value buffer (the buffer size must
   * be enough to hold all the values for the multi-value entry) and returns the number of values within the
   * multi-value entry.
   *
   * @param docId Document id
   * @param valueBuffer Value buffer
   * @param context Reader context
   * @return Number of values within the multi-value entry
   */
  default int getDoubleMV(int docId, double[] valueBuffer, T context) {
    throw new UnsupportedOperationException();
  }

  /**
   * Reads the DOUBLE type multi-value at the given document id.
   *
   * @param docId Document id
   * @param context Reader context
   * @return DOUBLE values at the given document id
   */
  default double[] getDoubleMV(int docId, T context) {
    throw new UnsupportedOperationException();
  }

  /**
   * Reads the STRING type multi-value at the given document id into the passed in value buffer (the buffer size must
   * be enough to hold all the values for the multi-value entry) and returns the number of values within the
   * multi-value entry.
   *
   * @param docId Document id
   * @param valueBuffer Value buffer
   * @param context Reader context
   * @return Number of values within the multi-value entry
   */
  default int getStringMV(int docId, String[] valueBuffer, T context) {
    throw new UnsupportedOperationException();
  }

  /**
   * Reads the STRING type multi-value at the given document id.
   *
   * @param docId Document id
   * @param context Reader context
   * @return STRING values at the given document id
   */
  default String[] getStringMV(int docId, T context) {
    throw new UnsupportedOperationException();
  }

  /**
   * Reads the bytes type multi-value at the given document id into the passed in value buffer (the buffer size must
   * be enough to hold all the values for the multi-value entry) and returns the number of values within the
   * multi-value entry.
   *
   * @param docId Document id
   * @param valueBuffer Value buffer
   * @param context Reader context
   * @return Number of values within the multi-value entry
   */
  default int getBytesMV(int docId, byte[][] valueBuffer, T context) {
    throw new UnsupportedOperationException();
  }

  /**
   * Reads the bytes type multi-value at the given document id.
   *
   * @param docId Document id
   * @param context Reader context
   * @return BYTE values at the given document id
   */
  default byte[][] getBytesMV(int docId, T context) {
    throw new UnsupportedOperationException();
  }

  /**
   * Gets the number of multi-values at a given document id and returns it.
   *
   * @param docId Document id
   * @param context Reader context
   * @return Number of values within the multi-value entry
   */
  default int getNumValuesMV(int docId, T context) {
    throw new UnsupportedOperationException();
  }

  // Functions for recording absolute buffer byte ranges accessed while reading a given docId

  /**
   * Returns whether the forward index supports recording the byte ranges accessed while reading a given docId.
   * For readers that do support this info, caller should check if the buffer is a {@link #isFixedOffsetMappingType}.
   * If yes, the byte range mapping for a docId can be calculated using the {@link #getRawDataStartOffset} and the
   * {@link #getDocLength} functions.
   * if not, caller should use the {@link #recordDocIdByteRanges} function to get the list of byte ranges accessed
   * for a docId.
   */
  default boolean isBufferByteRangeInfoSupported() {
    return false;
  }

  /**
   * Returns a list of {@link ByteRange} that represents all the distinct
   * buffer byte ranges (absolute offset, sizeInBytes) that are accessed when reading the given (@param docId}
   * @param docId to find the range for
   * @param context Reader context
   * @param ranges List of {@link ByteRange} to which the applicable value ranges will be added
   */
  default void recordDocIdByteRanges(int docId, T context, List<ByteRange> ranges) {
    throw new UnsupportedOperationException();
  }

  /**
   * Returns whether the forward index is of fixed length type, and therefore the docId -> byte range mapping is fixed
   * @return true if forward index has a fixed mapping of docId -> buffer offsets
   * (eg: FixedBitSVForwardIndexReader, FixedByteChunkSVForwardIndexReader (if buffer is uncompressed) etc), false
   * otherwise
   */
  default boolean isFixedOffsetMappingType() {
    throw new UnsupportedOperationException();
  }

  /**
   * Returns the base offset of raw data start within the fwd index buffer, if it's of fixed offset mapping type
   * @return raw data start offset if the reader is of fixed offset mapping type
   */
  default long getRawDataStartOffset() {
    throw new UnsupportedOperationException();
  }

  /**
   * Returns the length of each entry in the forward index, if it's of fixed offset mapping type
   */
  default int getDocLength() {
    throw new UnsupportedOperationException();
  }

  /**
   * Returns whether the length of each entry in the forward index is in bits, if it's of fixed offset mapping type
   */
  default boolean isDocLengthInBits() {
    return false;
  }

  /**
   * This class represents the buffer byte ranges accessed while reading a given docId.
   */
  class ByteRange {
    private final long _offset;
    private final int _sizeInBytes;

    public ByteRange(long offset, int sizeInBytes) {
      _offset = offset;
      _sizeInBytes = sizeInBytes;
    }

    public long getOffset() {
      return _offset;
    }

    public int getSizeInBytes() {
      return _sizeInBytes;
    }

    @Override
    public boolean equals(Object o) {
      if (this == o) {
        return true;
      }
      if (o == null || getClass() != o.getClass()) {
        return false;
      }
      ByteRange byteRange = (ByteRange) o;
      return _offset == byteRange._offset && _sizeInBytes == byteRange._sizeInBytes;
    }

    @Override
    public int hashCode() {
      return Objects.hash(_offset, _sizeInBytes);
    }

    @Override
    public String toString() {
      return "Range{" + "_offset=" + _offset + ", _size=" + _sizeInBytes + '}';
    }
  }
}
googleapis/google-cloud-java
36,683
java-visionai/proto-google-cloud-visionai-v1/src/main/java/com/google/cloud/visionai/v1/ListIndexEndpointsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/visionai/v1/warehouse.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.visionai.v1; /** * * * <pre> * Response message for ListIndexEndpoints. * </pre> * * Protobuf type {@code google.cloud.visionai.v1.ListIndexEndpointsResponse} */ public final class ListIndexEndpointsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.visionai.v1.ListIndexEndpointsResponse) ListIndexEndpointsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListIndexEndpointsResponse.newBuilder() to construct. 
private ListIndexEndpointsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListIndexEndpointsResponse() { indexEndpoints_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListIndexEndpointsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.visionai.v1.WarehouseProto .internal_static_google_cloud_visionai_v1_ListIndexEndpointsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.visionai.v1.WarehouseProto .internal_static_google_cloud_visionai_v1_ListIndexEndpointsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.visionai.v1.ListIndexEndpointsResponse.class, com.google.cloud.visionai.v1.ListIndexEndpointsResponse.Builder.class); } public static final int INDEX_ENDPOINTS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.visionai.v1.IndexEndpoint> indexEndpoints_; /** * * * <pre> * The list of IndexEndpoints. * </pre> * * <code>repeated .google.cloud.visionai.v1.IndexEndpoint index_endpoints = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.visionai.v1.IndexEndpoint> getIndexEndpointsList() { return indexEndpoints_; } /** * * * <pre> * The list of IndexEndpoints. * </pre> * * <code>repeated .google.cloud.visionai.v1.IndexEndpoint index_endpoints = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.visionai.v1.IndexEndpointOrBuilder> getIndexEndpointsOrBuilderList() { return indexEndpoints_; } /** * * * <pre> * The list of IndexEndpoints. 
* </pre> * * <code>repeated .google.cloud.visionai.v1.IndexEndpoint index_endpoints = 1;</code> */ @java.lang.Override public int getIndexEndpointsCount() { return indexEndpoints_.size(); } /** * * * <pre> * The list of IndexEndpoints. * </pre> * * <code>repeated .google.cloud.visionai.v1.IndexEndpoint index_endpoints = 1;</code> */ @java.lang.Override public com.google.cloud.visionai.v1.IndexEndpoint getIndexEndpoints(int index) { return indexEndpoints_.get(index); } /** * * * <pre> * The list of IndexEndpoints. * </pre> * * <code>repeated .google.cloud.visionai.v1.IndexEndpoint index_endpoints = 1;</code> */ @java.lang.Override public com.google.cloud.visionai.v1.IndexEndpointOrBuilder getIndexEndpointsOrBuilder(int index) { return indexEndpoints_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < indexEndpoints_.size(); i++) { output.writeMessage(1, indexEndpoints_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < indexEndpoints_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, indexEndpoints_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.visionai.v1.ListIndexEndpointsResponse)) { return super.equals(obj); } com.google.cloud.visionai.v1.ListIndexEndpointsResponse other = (com.google.cloud.visionai.v1.ListIndexEndpointsResponse) obj; if (!getIndexEndpointsList().equals(other.getIndexEndpointsList())) return false; if 
(!getNextPageToken().equals(other.getNextPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getIndexEndpointsCount() > 0) { hash = (37 * hash) + INDEX_ENDPOINTS_FIELD_NUMBER; hash = (53 * hash) + getIndexEndpointsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.visionai.v1.ListIndexEndpointsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.visionai.v1.ListIndexEndpointsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.visionai.v1.ListIndexEndpointsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.visionai.v1.ListIndexEndpointsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.visionai.v1.ListIndexEndpointsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.visionai.v1.ListIndexEndpointsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.visionai.v1.ListIndexEndpointsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.visionai.v1.ListIndexEndpointsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.visionai.v1.ListIndexEndpointsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.visionai.v1.ListIndexEndpointsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.visionai.v1.ListIndexEndpointsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.visionai.v1.ListIndexEndpointsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.visionai.v1.ListIndexEndpointsResponse 
prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for ListIndexEndpoints. * </pre> * * Protobuf type {@code google.cloud.visionai.v1.ListIndexEndpointsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.visionai.v1.ListIndexEndpointsResponse) com.google.cloud.visionai.v1.ListIndexEndpointsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.visionai.v1.WarehouseProto .internal_static_google_cloud_visionai_v1_ListIndexEndpointsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.visionai.v1.WarehouseProto .internal_static_google_cloud_visionai_v1_ListIndexEndpointsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.visionai.v1.ListIndexEndpointsResponse.class, com.google.cloud.visionai.v1.ListIndexEndpointsResponse.Builder.class); } // Construct using com.google.cloud.visionai.v1.ListIndexEndpointsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (indexEndpointsBuilder_ == null) { indexEndpoints_ = java.util.Collections.emptyList(); } else { indexEndpoints_ = null; indexEndpointsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.visionai.v1.WarehouseProto .internal_static_google_cloud_visionai_v1_ListIndexEndpointsResponse_descriptor; } @java.lang.Override public com.google.cloud.visionai.v1.ListIndexEndpointsResponse getDefaultInstanceForType() { return com.google.cloud.visionai.v1.ListIndexEndpointsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.visionai.v1.ListIndexEndpointsResponse build() { com.google.cloud.visionai.v1.ListIndexEndpointsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.visionai.v1.ListIndexEndpointsResponse buildPartial() { com.google.cloud.visionai.v1.ListIndexEndpointsResponse result = new com.google.cloud.visionai.v1.ListIndexEndpointsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.visionai.v1.ListIndexEndpointsResponse result) { if (indexEndpointsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { indexEndpoints_ = java.util.Collections.unmodifiableList(indexEndpoints_); bitField0_ = (bitField0_ & ~0x00000001); } result.indexEndpoints_ = indexEndpoints_; } else { result.indexEndpoints_ = indexEndpointsBuilder_.build(); } } private void buildPartial0(com.google.cloud.visionai.v1.ListIndexEndpointsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return 
super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.visionai.v1.ListIndexEndpointsResponse) { return mergeFrom((com.google.cloud.visionai.v1.ListIndexEndpointsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.visionai.v1.ListIndexEndpointsResponse other) { if (other == com.google.cloud.visionai.v1.ListIndexEndpointsResponse.getDefaultInstance()) return this; if (indexEndpointsBuilder_ == null) { if (!other.indexEndpoints_.isEmpty()) { if (indexEndpoints_.isEmpty()) { indexEndpoints_ = other.indexEndpoints_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureIndexEndpointsIsMutable(); indexEndpoints_.addAll(other.indexEndpoints_); } onChanged(); } } else { if (!other.indexEndpoints_.isEmpty()) { if (indexEndpointsBuilder_.isEmpty()) { indexEndpointsBuilder_.dispose(); indexEndpointsBuilder_ = null; indexEndpoints_ = other.indexEndpoints_; bitField0_ = (bitField0_ & ~0x00000001); indexEndpointsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getIndexEndpointsFieldBuilder() : null; } else { indexEndpointsBuilder_.addAllMessages(other.indexEndpoints_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.visionai.v1.IndexEndpoint m = input.readMessage( com.google.cloud.visionai.v1.IndexEndpoint.parser(), extensionRegistry); if (indexEndpointsBuilder_ == null) { ensureIndexEndpointsIsMutable(); indexEndpoints_.add(m); } else { indexEndpointsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.visionai.v1.IndexEndpoint> indexEndpoints_ = java.util.Collections.emptyList(); private void ensureIndexEndpointsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { indexEndpoints_ = new java.util.ArrayList<com.google.cloud.visionai.v1.IndexEndpoint>(indexEndpoints_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.visionai.v1.IndexEndpoint, 
com.google.cloud.visionai.v1.IndexEndpoint.Builder, com.google.cloud.visionai.v1.IndexEndpointOrBuilder> indexEndpointsBuilder_; /** * * * <pre> * The list of IndexEndpoints. * </pre> * * <code>repeated .google.cloud.visionai.v1.IndexEndpoint index_endpoints = 1;</code> */ public java.util.List<com.google.cloud.visionai.v1.IndexEndpoint> getIndexEndpointsList() { if (indexEndpointsBuilder_ == null) { return java.util.Collections.unmodifiableList(indexEndpoints_); } else { return indexEndpointsBuilder_.getMessageList(); } } /** * * * <pre> * The list of IndexEndpoints. * </pre> * * <code>repeated .google.cloud.visionai.v1.IndexEndpoint index_endpoints = 1;</code> */ public int getIndexEndpointsCount() { if (indexEndpointsBuilder_ == null) { return indexEndpoints_.size(); } else { return indexEndpointsBuilder_.getCount(); } } /** * * * <pre> * The list of IndexEndpoints. * </pre> * * <code>repeated .google.cloud.visionai.v1.IndexEndpoint index_endpoints = 1;</code> */ public com.google.cloud.visionai.v1.IndexEndpoint getIndexEndpoints(int index) { if (indexEndpointsBuilder_ == null) { return indexEndpoints_.get(index); } else { return indexEndpointsBuilder_.getMessage(index); } } /** * * * <pre> * The list of IndexEndpoints. * </pre> * * <code>repeated .google.cloud.visionai.v1.IndexEndpoint index_endpoints = 1;</code> */ public Builder setIndexEndpoints(int index, com.google.cloud.visionai.v1.IndexEndpoint value) { if (indexEndpointsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureIndexEndpointsIsMutable(); indexEndpoints_.set(index, value); onChanged(); } else { indexEndpointsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The list of IndexEndpoints. 
* </pre> * * <code>repeated .google.cloud.visionai.v1.IndexEndpoint index_endpoints = 1;</code> */ public Builder setIndexEndpoints( int index, com.google.cloud.visionai.v1.IndexEndpoint.Builder builderForValue) { if (indexEndpointsBuilder_ == null) { ensureIndexEndpointsIsMutable(); indexEndpoints_.set(index, builderForValue.build()); onChanged(); } else { indexEndpointsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of IndexEndpoints. * </pre> * * <code>repeated .google.cloud.visionai.v1.IndexEndpoint index_endpoints = 1;</code> */ public Builder addIndexEndpoints(com.google.cloud.visionai.v1.IndexEndpoint value) { if (indexEndpointsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureIndexEndpointsIsMutable(); indexEndpoints_.add(value); onChanged(); } else { indexEndpointsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The list of IndexEndpoints. * </pre> * * <code>repeated .google.cloud.visionai.v1.IndexEndpoint index_endpoints = 1;</code> */ public Builder addIndexEndpoints(int index, com.google.cloud.visionai.v1.IndexEndpoint value) { if (indexEndpointsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureIndexEndpointsIsMutable(); indexEndpoints_.add(index, value); onChanged(); } else { indexEndpointsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The list of IndexEndpoints. * </pre> * * <code>repeated .google.cloud.visionai.v1.IndexEndpoint index_endpoints = 1;</code> */ public Builder addIndexEndpoints( com.google.cloud.visionai.v1.IndexEndpoint.Builder builderForValue) { if (indexEndpointsBuilder_ == null) { ensureIndexEndpointsIsMutable(); indexEndpoints_.add(builderForValue.build()); onChanged(); } else { indexEndpointsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The list of IndexEndpoints. 
* </pre> * * <code>repeated .google.cloud.visionai.v1.IndexEndpoint index_endpoints = 1;</code> */ public Builder addIndexEndpoints( int index, com.google.cloud.visionai.v1.IndexEndpoint.Builder builderForValue) { if (indexEndpointsBuilder_ == null) { ensureIndexEndpointsIsMutable(); indexEndpoints_.add(index, builderForValue.build()); onChanged(); } else { indexEndpointsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of IndexEndpoints. * </pre> * * <code>repeated .google.cloud.visionai.v1.IndexEndpoint index_endpoints = 1;</code> */ public Builder addAllIndexEndpoints( java.lang.Iterable<? extends com.google.cloud.visionai.v1.IndexEndpoint> values) { if (indexEndpointsBuilder_ == null) { ensureIndexEndpointsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, indexEndpoints_); onChanged(); } else { indexEndpointsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The list of IndexEndpoints. * </pre> * * <code>repeated .google.cloud.visionai.v1.IndexEndpoint index_endpoints = 1;</code> */ public Builder clearIndexEndpoints() { if (indexEndpointsBuilder_ == null) { indexEndpoints_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { indexEndpointsBuilder_.clear(); } return this; } /** * * * <pre> * The list of IndexEndpoints. * </pre> * * <code>repeated .google.cloud.visionai.v1.IndexEndpoint index_endpoints = 1;</code> */ public Builder removeIndexEndpoints(int index) { if (indexEndpointsBuilder_ == null) { ensureIndexEndpointsIsMutable(); indexEndpoints_.remove(index); onChanged(); } else { indexEndpointsBuilder_.remove(index); } return this; } /** * * * <pre> * The list of IndexEndpoints. 
* </pre> * * <code>repeated .google.cloud.visionai.v1.IndexEndpoint index_endpoints = 1;</code> */ public com.google.cloud.visionai.v1.IndexEndpoint.Builder getIndexEndpointsBuilder(int index) { return getIndexEndpointsFieldBuilder().getBuilder(index); } /** * * * <pre> * The list of IndexEndpoints. * </pre> * * <code>repeated .google.cloud.visionai.v1.IndexEndpoint index_endpoints = 1;</code> */ public com.google.cloud.visionai.v1.IndexEndpointOrBuilder getIndexEndpointsOrBuilder( int index) { if (indexEndpointsBuilder_ == null) { return indexEndpoints_.get(index); } else { return indexEndpointsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The list of IndexEndpoints. * </pre> * * <code>repeated .google.cloud.visionai.v1.IndexEndpoint index_endpoints = 1;</code> */ public java.util.List<? extends com.google.cloud.visionai.v1.IndexEndpointOrBuilder> getIndexEndpointsOrBuilderList() { if (indexEndpointsBuilder_ != null) { return indexEndpointsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(indexEndpoints_); } } /** * * * <pre> * The list of IndexEndpoints. * </pre> * * <code>repeated .google.cloud.visionai.v1.IndexEndpoint index_endpoints = 1;</code> */ public com.google.cloud.visionai.v1.IndexEndpoint.Builder addIndexEndpointsBuilder() { return getIndexEndpointsFieldBuilder() .addBuilder(com.google.cloud.visionai.v1.IndexEndpoint.getDefaultInstance()); } /** * * * <pre> * The list of IndexEndpoints. * </pre> * * <code>repeated .google.cloud.visionai.v1.IndexEndpoint index_endpoints = 1;</code> */ public com.google.cloud.visionai.v1.IndexEndpoint.Builder addIndexEndpointsBuilder(int index) { return getIndexEndpointsFieldBuilder() .addBuilder(index, com.google.cloud.visionai.v1.IndexEndpoint.getDefaultInstance()); } /** * * * <pre> * The list of IndexEndpoints. 
* </pre> * * <code>repeated .google.cloud.visionai.v1.IndexEndpoint index_endpoints = 1;</code> */ public java.util.List<com.google.cloud.visionai.v1.IndexEndpoint.Builder> getIndexEndpointsBuilderList() { return getIndexEndpointsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.visionai.v1.IndexEndpoint, com.google.cloud.visionai.v1.IndexEndpoint.Builder, com.google.cloud.visionai.v1.IndexEndpointOrBuilder> getIndexEndpointsFieldBuilder() { if (indexEndpointsBuilder_ == null) { indexEndpointsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.visionai.v1.IndexEndpoint, com.google.cloud.visionai.v1.IndexEndpoint.Builder, com.google.cloud.visionai.v1.IndexEndpointOrBuilder>( indexEndpoints_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); indexEndpoints_ = null; } return indexEndpointsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.visionai.v1.ListIndexEndpointsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.visionai.v1.ListIndexEndpointsResponse) private static final com.google.cloud.visionai.v1.ListIndexEndpointsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.visionai.v1.ListIndexEndpointsResponse(); } public static com.google.cloud.visionai.v1.ListIndexEndpointsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListIndexEndpointsResponse> PARSER = new com.google.protobuf.AbstractParser<ListIndexEndpointsResponse>() { @java.lang.Override public ListIndexEndpointsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return 
builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListIndexEndpointsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListIndexEndpointsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.visionai.v1.ListIndexEndpointsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/flink
37,035
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeMerging.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.types.logical.utils; import org.apache.flink.annotation.Internal; import org.apache.flink.table.types.logical.ArrayType; import org.apache.flink.table.types.logical.BinaryType; import org.apache.flink.table.types.logical.CharType; import org.apache.flink.table.types.logical.DateType; import org.apache.flink.table.types.logical.DayTimeIntervalType; import org.apache.flink.table.types.logical.DayTimeIntervalType.DayTimeResolution; import org.apache.flink.table.types.logical.DecimalType; import org.apache.flink.table.types.logical.DoubleType; import org.apache.flink.table.types.logical.LegacyTypeInformationType; import org.apache.flink.table.types.logical.LocalZonedTimestampType; import org.apache.flink.table.types.logical.LogicalType; import org.apache.flink.table.types.logical.LogicalTypeRoot; import org.apache.flink.table.types.logical.MapType; import org.apache.flink.table.types.logical.MultisetType; import org.apache.flink.table.types.logical.NullType; import org.apache.flink.table.types.logical.RowType; import org.apache.flink.table.types.logical.TimeType; import org.apache.flink.table.types.logical.TimestampType; import 
org.apache.flink.table.types.logical.VarBinaryType; import org.apache.flink.table.types.logical.VarCharType; import org.apache.flink.table.types.logical.YearMonthIntervalType; import org.apache.flink.table.types.logical.YearMonthIntervalType.YearMonthResolution; import org.apache.flink.table.types.logical.ZonedTimestampType; import org.apache.flink.util.Preconditions; import javax.annotation.Nullable; import java.util.AbstractList; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.stream.Collectors; import java.util.stream.IntStream; import static org.apache.flink.table.types.logical.DayTimeIntervalType.DayTimeResolution.DAY; import static org.apache.flink.table.types.logical.DayTimeIntervalType.DayTimeResolution.DAY_TO_HOUR; import static org.apache.flink.table.types.logical.DayTimeIntervalType.DayTimeResolution.DAY_TO_MINUTE; import static org.apache.flink.table.types.logical.DayTimeIntervalType.DayTimeResolution.DAY_TO_SECOND; import static org.apache.flink.table.types.logical.DayTimeIntervalType.DayTimeResolution.HOUR; import static org.apache.flink.table.types.logical.DayTimeIntervalType.DayTimeResolution.HOUR_TO_MINUTE; import static org.apache.flink.table.types.logical.DayTimeIntervalType.DayTimeResolution.HOUR_TO_SECOND; import static org.apache.flink.table.types.logical.DayTimeIntervalType.DayTimeResolution.MINUTE; import static org.apache.flink.table.types.logical.DayTimeIntervalType.DayTimeResolution.MINUTE_TO_SECOND; import static org.apache.flink.table.types.logical.DayTimeIntervalType.DayTimeResolution.SECOND; import static org.apache.flink.table.types.logical.LogicalTypeFamily.APPROXIMATE_NUMERIC; import static org.apache.flink.table.types.logical.LogicalTypeFamily.BINARY_STRING; import static org.apache.flink.table.types.logical.LogicalTypeFamily.CHARACTER_STRING; import static 
org.apache.flink.table.types.logical.LogicalTypeFamily.DATETIME; import static org.apache.flink.table.types.logical.LogicalTypeFamily.EXACT_NUMERIC; import static org.apache.flink.table.types.logical.LogicalTypeFamily.INTERVAL; import static org.apache.flink.table.types.logical.LogicalTypeFamily.NUMERIC; import static org.apache.flink.table.types.logical.LogicalTypeFamily.TIME; import static org.apache.flink.table.types.logical.LogicalTypeFamily.TIMESTAMP; import static org.apache.flink.table.types.logical.LogicalTypeRoot.ARRAY; import static org.apache.flink.table.types.logical.LogicalTypeRoot.BINARY; import static org.apache.flink.table.types.logical.LogicalTypeRoot.CHAR; import static org.apache.flink.table.types.logical.LogicalTypeRoot.DATE; import static org.apache.flink.table.types.logical.LogicalTypeRoot.DECIMAL; import static org.apache.flink.table.types.logical.LogicalTypeRoot.DOUBLE; import static org.apache.flink.table.types.logical.LogicalTypeRoot.INTERVAL_DAY_TIME; import static org.apache.flink.table.types.logical.LogicalTypeRoot.INTERVAL_YEAR_MONTH; import static org.apache.flink.table.types.logical.LogicalTypeRoot.MAP; import static org.apache.flink.table.types.logical.LogicalTypeRoot.MULTISET; import static org.apache.flink.table.types.logical.LogicalTypeRoot.NULL; import static org.apache.flink.table.types.logical.LogicalTypeRoot.RAW; import static org.apache.flink.table.types.logical.LogicalTypeRoot.ROW; import static org.apache.flink.table.types.logical.LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE; import static org.apache.flink.table.types.logical.LogicalTypeRoot.TIMESTAMP_WITH_LOCAL_TIME_ZONE; import static org.apache.flink.table.types.logical.LogicalTypeRoot.TIMESTAMP_WITH_TIME_ZONE; import static org.apache.flink.table.types.logical.LogicalTypeRoot.VARBINARY; import static org.apache.flink.table.types.logical.LogicalTypeRoot.VARCHAR; import static org.apache.flink.table.types.logical.YearMonthIntervalType.YearMonthResolution.MONTH; import 
static org.apache.flink.table.types.logical.YearMonthIntervalType.YearMonthResolution.YEAR; import static org.apache.flink.table.types.logical.YearMonthIntervalType.YearMonthResolution.YEAR_TO_MONTH; import static org.apache.flink.table.types.logical.utils.LogicalTypeCasts.supportsImplicitCast; import static org.apache.flink.table.types.logical.utils.LogicalTypeChecks.getLength; import static org.apache.flink.table.types.logical.utils.LogicalTypeChecks.getPrecision; import static org.apache.flink.table.types.logical.utils.LogicalTypeChecks.getScale; /** Utilities for merging multiple {@link LogicalType}. */ @Internal public final class LogicalTypeMerging { // mappings for interval generalization private static final Map<YearMonthResolution, List<YearMonthResolution>> YEAR_MONTH_RES_TO_BOUNDARIES = new HashMap<>(); private static final Map<List<YearMonthResolution>, YearMonthResolution> YEAR_MONTH_BOUNDARIES_TO_RES = new HashMap<>(); private static final int MINIMUM_ADJUSTED_SCALE = 6; static { addYearMonthMapping(YEAR, YEAR); addYearMonthMapping(MONTH, MONTH); addYearMonthMapping(YEAR_TO_MONTH, YEAR, MONTH); } private static final Map<DayTimeResolution, List<DayTimeResolution>> DAY_TIME_RES_TO_BOUNDARIES = new HashMap<>(); private static final Map<List<DayTimeResolution>, DayTimeResolution> DAY_TIME_BOUNDARIES_TO_RES = new HashMap<>(); static { addDayTimeMapping(DAY, DAY); addDayTimeMapping(DAY_TO_HOUR, DAY, HOUR); addDayTimeMapping(DAY_TO_MINUTE, DAY, MINUTE); addDayTimeMapping(DAY_TO_SECOND, DAY, SECOND); addDayTimeMapping(HOUR, HOUR); addDayTimeMapping(HOUR_TO_MINUTE, HOUR, MINUTE); addDayTimeMapping(HOUR_TO_SECOND, HOUR, SECOND); addDayTimeMapping(MINUTE, MINUTE); addDayTimeMapping(MINUTE_TO_SECOND, MINUTE, SECOND); addDayTimeMapping(SECOND, SECOND); } private static void addYearMonthMapping( YearMonthResolution to, YearMonthResolution... 
boundaries) {
    final List<YearMonthResolution> bounds = Arrays.asList(boundaries);
    YEAR_MONTH_RES_TO_BOUNDARIES.put(to, bounds);
    YEAR_MONTH_BOUNDARIES_TO_RES.put(bounds, to);
}

/** Registers the two-way mapping between a day-time resolution and its boundary resolutions. */
private static void addDayTimeMapping(DayTimeResolution to, DayTimeResolution... boundaries) {
    final List<DayTimeResolution> bounds = Arrays.asList(boundaries);
    DAY_TIME_RES_TO_BOUNDARIES.put(to, bounds);
    DAY_TIME_BOUNDARIES_TO_RES.put(bounds, to);
}

/**
 * Returns the most common, more general {@link LogicalType} for a given set of types. If such a
 * type exists, all given types can be casted to this more general type.
 *
 * <p>For example: {@code [INT, BIGINT, DECIMAL(2, 2)]} would lead to {@code DECIMAL(21, 2)}.
 *
 * <p>This class aims to be compatible with the SQL standard. It is inspired by Apache Calcite's
 * {@code SqlTypeFactoryImpl#leastRestrictive} method.
 *
 * @param types non-empty list of types to merge
 * @return the merged type, or empty if no common type exists
 */
public static Optional<LogicalType> findCommonType(List<LogicalType> types) {
    Preconditions.checkArgument(types.size() > 0, "List of types must not be empty.");

    // gather statistics about the input in a single pass
    boolean containsRaw = false;
    boolean containsNull = false;
    boolean containsNullable = false;
    for (LogicalType candidate : types) {
        final LogicalTypeRoot root = candidate.getTypeRoot();
        if (root == RAW) {
            containsRaw = true;
        } else if (root == NULL) {
            containsNull = true;
        }
        if (candidate.isNullable()) {
            containsNullable = true;
        }
    }

    // merging always operates on the nullable counterparts
    final List<LogicalType> nullableTypes =
            types.stream().map(t -> t.copy(true)).collect(Collectors.toList());

    LogicalType commonType = findCommonNullableType(nullableTypes, containsRaw, containsNull);
    if (commonType == null) {
        commonType = findCommonCastableType(nullableTypes);
    }

    if (commonType == null) {
        return Optional.empty();
    }
    final LogicalType withNullability = commonType.copy(containsNullable);
    // NULL is reserved for untyped literals only
    if (withNullability.is(NULL)) {
        return Optional.empty();
    }
    return Optional.of(withNullability);
}

// =========================
Decimal Precision Deriving ========================== // Adopted from "https://docs.microsoft.com/en-us/sql/t-sql/data-types/precision- // scale-and-length-transact-sql" // // Operation Result Precision Result Scale // e1 + e2 max(s1, s2) + max(p1-s1, p2-s2) + 1 max(s1, s2) // e1 - e2 max(s1, s2) + max(p1-s1, p2-s2) + 1 max(s1, s2) // e1 * e2 p1 + p2 + 1 s1 + s2 // e1 / e2 p1 - s1 + s2 + max(6, s1 + p2 + 1) max(6, s1 + p2 + 1) // e1 % e2 min(p1-s1, p2-s2) + max(s1, s2) max(s1, s2) // // Also, if the precision / scale are out of the range, the scale may be sacrificed // in order to prevent the truncation of the integer part of the decimals. /** Finds the result type of a decimal division operation. */ public static DecimalType findDivisionDecimalType( int precision1, int scale1, int precision2, int scale2) { int scale = Math.max(6, scale1 + precision2 + 1); int precision = precision1 - scale1 + scale2 + scale; return adjustPrecisionScale(precision, scale); } /** Finds the result type of a decimal modulo operation. */ public static DecimalType findModuloDecimalType( int precision1, int scale1, int precision2, int scale2) { final int scale = Math.max(scale1, scale2); int precision = Math.min(precision1 - scale1, precision2 - scale2) + scale; return adjustPrecisionScale(precision, scale); } /** Finds the result type of a decimal multiplication operation. */ public static DecimalType findMultiplicationDecimalType( int precision1, int scale1, int precision2, int scale2) { int scale = scale1 + scale2; int precision = precision1 + precision2 + 1; return adjustPrecisionScale(precision, scale); } /** Finds the result type of a decimal addition operation. 
*/ public static DecimalType findAdditionDecimalType( int precision1, int scale1, int precision2, int scale2) { final int scale = Math.max(scale1, scale2); int precision = Math.max(precision1 - scale1, precision2 - scale2) + scale + 1; return adjustPrecisionScale(precision, scale); } /** Finds the result type of a decimal rounding operation. */ public static DecimalType findRoundDecimalType(int precision, int scale, int round) { if (round >= scale) { return new DecimalType(false, precision, scale); } if (round < 0) { return new DecimalType( false, Math.min(DecimalType.MAX_PRECISION, 1 + precision - scale), 0); } // 0 <= r < s // NOTE: rounding may increase the digits by 1, therefore we need +1 on precisions. return new DecimalType(false, 1 + precision - scale + round, round); } /** Finds the result type of a decimal average aggregation. */ public static LogicalType findAvgAggType(LogicalType argType) { final LogicalType resultType; if (argType.is(DECIMAL)) { // a hack to make legacy types possible until we drop them if (argType instanceof LegacyTypeInformationType) { return argType; } // adopted from // https://docs.microsoft.com/en-us/sql/t-sql/functions/avg-transact-sql // however, we count by BIGINT, therefore divide by DECIMAL(20,0), // but the end result is actually the same, which is DECIMAL(38, MAX(6, s)). resultType = LogicalTypeMerging.findDivisionDecimalType(38, getScale(argType), 20, 0); } else { resultType = argType; } return resultType.copy(argType.isNullable()); } /** Finds the result type of a decimal sum aggregation. 
*/ public static LogicalType findSumAggType(LogicalType argType) { // adopted from // https://docs.microsoft.com/en-us/sql/t-sql/functions/sum-transact-sql final LogicalType resultType; if (argType.is(DECIMAL)) { // a hack to make legacy types possible until we drop them if (argType instanceof LegacyTypeInformationType) { return argType; } resultType = new DecimalType(false, 38, getScale(argType)); } else { resultType = argType; } return resultType.copy(argType.isNullable()); } // -------------------------------------------------------------------------------------------- /** * Scale adjustment implementation is inspired to SQLServer's one. In particular, when a result * precision is greater than MAX_PRECISION, the corresponding scale is reduced to prevent the * integral part of a result from being truncated. * * <p>https://docs.microsoft.com/en-us/sql/t-sql/data-types/precision-scale-and-length-transact-sql * * <p>The rules (although inspired by SQL Server) are not followed 100%, instead the approach of * Spark/Hive is followed for adjusting the precision. * * <p>http://www.openkb.info/2021/05/understand-decimal-precision-and-scale.html * * <p>For (38, 8) + (32, 8) -> (39, 8) (The rules for addition, initially calculate a decimal * type, assuming its precision is infinite) results in a decimal with integral part of 31 * digits. * * <p>This method is called subsequently to adjust the resulting decimal since the maximum * allowed precision is 38 (so far a precision of 39 is calculated in the first step). 
So, the * rounding for SQL Server would be: (39, 8) -> (38, 8) // integral part: 30, but instead we * follow the Hive/Spark approach which gives: (39, 8) -> (38, 7) // integral part: 31 */ private static DecimalType adjustPrecisionScale(int precision, int scale) { if (precision <= DecimalType.MAX_PRECISION) { // Adjustment only needed when we exceed max precision return new DecimalType(false, precision, scale); } else { int digitPart = precision - scale; // If original scale is less than MINIMUM_ADJUSTED_SCALE, use original scale value; // otherwise preserve at least MINIMUM_ADJUSTED_SCALE fractional digits int minScalePart = Math.min(scale, MINIMUM_ADJUSTED_SCALE); int adjustScale = Math.max(DecimalType.MAX_PRECISION - digitPart, minScalePart); return new DecimalType(false, DecimalType.MAX_PRECISION, adjustScale); } } private static @Nullable LogicalType findCommonCastableType(List<LogicalType> normalizedTypes) { LogicalType resultType = normalizedTypes.get(0); for (LogicalType type : normalizedTypes) { final LogicalTypeRoot typeRoot = type.getTypeRoot(); // NULL does not affect the result of this loop if (typeRoot == NULL) { continue; } if (supportsImplicitCast(resultType, type)) { resultType = type; } else { if (!supportsImplicitCast(type, resultType)) { return null; } } } return resultType; } @SuppressWarnings("ConstantConditions") private static @Nullable LogicalType findCommonNullableType( List<LogicalType> normalizedTypes, boolean hasRawType, boolean hasNullType) { // all RAW types must be equal if (hasRawType) { return findExactlySameType(normalizedTypes); } LogicalType resultType = null; for (LogicalType type : normalizedTypes) { final LogicalTypeRoot typeRoot = type.getTypeRoot(); // NULL does not affect the result of this loop if (typeRoot == NULL) { continue; } // if result type is still null, consider the current type as a potential // result type candidate if (resultType == null) { resultType = type; } // find special patterns final LogicalType 
patternType = findCommonTypePattern(resultType, type); if (patternType != null) { resultType = patternType; continue; } // for types of family CONSTRUCTED if (typeRoot == ARRAY) { return findCommonArrayType(normalizedTypes); } else if (typeRoot == MULTISET) { return findCommonMultisetType(normalizedTypes); } else if (typeRoot == MAP) { return findCommonMapType(normalizedTypes); } else if (typeRoot == ROW) { return findCommonRowType(normalizedTypes); } // exit if two completely different types are compared (e.g. ROW and INT) // this simplifies the following lines as we compare same interval families for example if (!areSimilarTypes(resultType, type)) { return null; } // for types of family CHARACTER_STRING or BINARY_STRING if (type.is(CHARACTER_STRING) | type.is(BINARY_STRING)) { final int length = combineLength(resultType, type); if (resultType.isAnyOf(VARCHAR, VARBINARY)) { // variable length types remain variable length types resultType = createStringType(resultType.getTypeRoot(), length); } else if (getLength(resultType) != getLength(type)) { // for different fixed lengths // this is different from the SQL standard but prevents whitespace // padding/modification of strings if (resultType.is(CHAR)) { resultType = createStringType(VARCHAR, length); } else if (resultType.is(BINARY)) { resultType = createStringType(VARBINARY, length); } } else { // for same type with same length resultType = createStringType(typeRoot, length); } } // for EXACT_NUMERIC types else if (type.is(EXACT_NUMERIC)) { if (resultType.is(EXACT_NUMERIC)) { resultType = createCommonExactNumericType(resultType, type); } else if (resultType.is(APPROXIMATE_NUMERIC)) { // the result is already approximate if (typeRoot == DECIMAL) { // in case of DECIMAL we enforce DOUBLE resultType = new DoubleType(); } } else { return null; } } // for APPROXIMATE_NUMERIC types else if (type.is(APPROXIMATE_NUMERIC)) { if (resultType.is(APPROXIMATE_NUMERIC)) { resultType = 
createCommonApproximateNumericType(resultType, type); } else if (resultType.is(EXACT_NUMERIC)) { // the result was exact so far if (typeRoot == DECIMAL) { // in case of DECIMAL we enforce DOUBLE resultType = new DoubleType(); } else { // enforce an approximate result resultType = type; } } else { return null; } } // for DATE else if (type.is(DATE)) { if (resultType.is(DATE)) { resultType = new DateType(); // for enabling findCommonTypePattern } else { return null; } } // for TIME else if (type.is(TIME)) { if (resultType.is(TIME)) { resultType = new TimeType(combinePrecision(resultType, type)); } else { return null; } } // for TIMESTAMP else if (type.is(TIMESTAMP)) { if (resultType.is(TIMESTAMP)) { resultType = createCommonTimestampType(resultType, type); } else { return null; } } // for day-time intervals else if (typeRoot == INTERVAL_DAY_TIME) { resultType = createCommonDayTimeIntervalType( (DayTimeIntervalType) resultType, (DayTimeIntervalType) type); } // for year-month intervals else if (typeRoot == INTERVAL_YEAR_MONTH) { resultType = createCommonYearMonthIntervalType( (YearMonthIntervalType) resultType, (YearMonthIntervalType) type); } // other types are handled by findCommonCastableType else { return null; } } // NULL type only if (resultType == null && hasNullType) { return new NullType(); } return resultType; } private static boolean areSimilarTypes(LogicalType left, LogicalType right) { // two types are similar iff they can be the operands of an SQL equality predicate // similarity based on families if (left.is(CHARACTER_STRING) && right.is(CHARACTER_STRING)) { return true; } else if (left.is(BINARY_STRING) && right.is(BINARY_STRING)) { return true; } else if (left.is(NUMERIC) && right.is(NUMERIC)) { return true; } else if (left.is(TIME) && right.is(TIME)) { return true; } else if (left.is(TIMESTAMP) && right.is(TIMESTAMP)) { return true; } // similarity based on root return left.getTypeRoot() == right.getTypeRoot(); } private static @Nullable LogicalType 
findExactlySameType(List<LogicalType> normalizedTypes) { final LogicalType firstType = normalizedTypes.get(0); for (LogicalType type : normalizedTypes) { if (!type.equals(firstType)) { return null; } } return firstType; } private static @Nullable LogicalType findCommonTypePattern( LogicalType resultType, LogicalType type) { if (resultType.is(DATETIME) && type.is(INTERVAL)) { return resultType; } else if (resultType.is(INTERVAL) && type.is(DATETIME)) { return type; } else if ((resultType.is(TIMESTAMP) || resultType.is(DATE)) && type.is(EXACT_NUMERIC)) { return resultType; } else if (resultType.is(EXACT_NUMERIC) && (type.is(TIMESTAMP) || type.is(DATE))) { return type; } // for "DATETIME + EXACT_NUMERIC", EXACT_NUMERIC is always treated as an interval of days // therefore, TIME + EXACT_NUMERIC is not supported return null; } private static @Nullable LogicalType findCommonArrayType(List<LogicalType> normalizedTypes) { final List<LogicalType> children = findCommonChildrenTypes(normalizedTypes); if (children == null) { return null; } return new ArrayType(children.get(0)); } private static @Nullable LogicalType findCommonMultisetType(List<LogicalType> normalizedTypes) { final List<LogicalType> children = findCommonChildrenTypes(normalizedTypes); if (children == null) { return null; } return new MultisetType(children.get(0)); } private static @Nullable LogicalType findCommonMapType(List<LogicalType> normalizedTypes) { final List<LogicalType> children = findCommonChildrenTypes(normalizedTypes); if (children == null) { return null; } return new MapType(children.get(0), children.get(1)); } private static @Nullable LogicalType findCommonRowType(List<LogicalType> normalizedTypes) { final List<LogicalType> children = findCommonChildrenTypes(normalizedTypes); if (children == null) { return null; } final RowType firstType = (RowType) normalizedTypes.get(0); final List<RowType.RowField> newFields = IntStream.range(0, children.size()) .mapToObj( pos -> { final LogicalType newType = 
children.get(pos); final RowType.RowField originalField = firstType.getFields().get(pos); if (originalField.getDescription().isPresent()) { return new RowType.RowField( originalField.getName(), newType, originalField.getDescription().get()); } else { return new RowType.RowField( originalField.getName(), newType); } }) .collect(Collectors.toList()); return new RowType(newFields); } private static @Nullable List<LogicalType> findCommonChildrenTypes( List<LogicalType> normalizedTypes) { final LogicalType firstType = normalizedTypes.get(0); final LogicalTypeRoot typeRoot = firstType.getTypeRoot(); final int numberOfChildren = firstType.getChildren().size(); for (LogicalType type : normalizedTypes) { // all types must have the same root if (type.getTypeRoot() != typeRoot) { return null; } // all types must have the same number of children if (type.getChildren().size() != numberOfChildren) { return null; } } // recursively compute column-wise least restrictive final List<LogicalType> resultChildren = new ArrayList<>(numberOfChildren); for (int i = 0; i < numberOfChildren; i++) { final Optional<LogicalType> childType = findCommonType(new ChildTypeView(normalizedTypes, i)); if (!childType.isPresent()) { return null; } resultChildren.add(childType.get()); } // no child should be empty at this point return resultChildren; } private static LogicalType createCommonExactNumericType( LogicalType resultType, LogicalType type) { // same EXACT_NUMERIC types if (type.equals(resultType)) { return resultType; } final LogicalTypeRoot resultTypeRoot = resultType.getTypeRoot(); final LogicalTypeRoot typeRoot = type.getTypeRoot(); // no DECIMAL types involved if (resultTypeRoot != DECIMAL && typeRoot != DECIMAL) { // type root contains order of precision if (getPrecision(type) > getPrecision(resultType)) { return type; } return resultType; } // determine DECIMAL with precision (p), scale (s) and number of whole digits (d): // d = max(p1 - s1, p2 - s2) // s <= max(s1, s2) // p = s + d 
final int p1 = getPrecision(resultType);
    final int p2 = getPrecision(type);
    final int s1 = getScale(resultType);
    final int s2 = getScale(type);
    final int cap = DecimalType.MAX_PRECISION;

    // whole digits d = max(p1 - s1, p2 - s2), capped at the maximum precision
    final int wholeDigits = Math.min(Math.max(p1 - s1, p2 - s2), cap);
    // scale s = max(s1, s2), capped at the digits still available
    final int mergedScale = Math.min(Math.max(s1, s2), cap - wholeDigits);

    return new DecimalType(wholeDigits + mergedScale, mergedScale);
}

/** DOUBLE dominates; otherwise the current result type stands. */
private static LogicalType createCommonApproximateNumericType(
        LogicalType resultType, LogicalType type) {
    if (resultType.is(DOUBLE) || type.is(DOUBLE)) {
        return new DoubleType();
    }
    return resultType;
}

/** Merges two timestamp types: zoned kinds dominate, precision is the maximum of both. */
private static LogicalType createCommonTimestampType(LogicalType resultType, LogicalType type) {
    // same types
    if (type.equals(resultType)) {
        return resultType;
    }

    final LogicalTypeRoot resultRoot = resultType.getTypeRoot();
    final LogicalTypeRoot otherRoot = type.getTypeRoot();
    final int precision = combinePrecision(resultType, type);

    // same type roots
    if (otherRoot == resultRoot) {
        return createTimestampType(resultRoot, precision);
    }

    // generalize to zoned type
    if (otherRoot == TIMESTAMP_WITH_TIME_ZONE || resultRoot == TIMESTAMP_WITH_TIME_ZONE) {
        return createTimestampType(TIMESTAMP_WITH_TIME_ZONE, precision);
    }
    if (otherRoot == TIMESTAMP_WITH_LOCAL_TIME_ZONE
            || resultRoot == TIMESTAMP_WITH_LOCAL_TIME_ZONE) {
        return createTimestampType(TIMESTAMP_WITH_LOCAL_TIME_ZONE, precision);
    }
    return createTimestampType(TIMESTAMP_WITHOUT_TIME_ZONE, precision);
}

private static LogicalType createCommonDayTimeIntervalType(
        DayTimeIntervalType resultType, DayTimeIntervalType type) {
    final int maxDayPrecision =
            Math.max(resultType.getDayPrecision(), type.getDayPrecision());
    final int maxFractionalPrecision =
            Math.max(resultType.getFractionalPrecision(), type.getFractionalPrecision());
    return new DayTimeIntervalType(
            combineIntervalResolutions(
                    DayTimeResolution.values(),
                    DAY_TIME_RES_TO_BOUNDARIES,
                    DAY_TIME_BOUNDARIES_TO_RES,
                    resultType.getResolution(),
                    type.getResolution()),
            maxDayPrecision,
            maxFractionalPrecision);
}

private static LogicalType createCommonYearMonthIntervalType(
        YearMonthIntervalType resultType, YearMonthIntervalType type) {
    final int maxYearPrecision =
            Math.max(resultType.getYearPrecision(), type.getYearPrecision());
    return new YearMonthIntervalType(
            combineIntervalResolutions(
                    YearMonthResolution.values(),
                    YEAR_MONTH_RES_TO_BOUNDARIES,
                    YEAR_MONTH_BOUNDARIES_TO_RES,
                    resultType.getResolution(),
                    type.getResolution()),
            maxYearPrecision);
}

private static LogicalType createTimestampType(LogicalTypeRoot typeRoot, int precision) {
    switch (typeRoot) {
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            return new TimestampType(precision);
        case TIMESTAMP_WITH_TIME_ZONE:
            return new ZonedTimestampType(precision);
        case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
            return new LocalZonedTimestampType(precision);
        default:
            throw new IllegalArgumentException();
    }
}

private static LogicalType createStringType(LogicalTypeRoot typeRoot, int length) {
    // length 0 denotes an empty literal which has dedicated singleton types
    switch (typeRoot) {
        case CHAR:
            return length == 0 ? CharType.ofEmptyLiteral() : new CharType(length);
        case VARCHAR:
            return length == 0 ? VarCharType.ofEmptyLiteral() : new VarCharType(length);
        case BINARY:
            return length == 0 ? BinaryType.ofEmptyLiteral() : new BinaryType(length);
        case VARBINARY:
            return length == 0 ? VarBinaryType.ofEmptyLiteral() : new VarBinaryType(length);
        default:
            throw new IllegalArgumentException();
    }
}

/**
 * Generalizes two interval resolutions by spanning from the smaller start boundary to the
 * larger end boundary (by enum ordinal) and looking the result up in the boundary table.
 */
private static <T extends Enum<T>> T combineIntervalResolutions(
        T[] res, Map<T, List<T>> resToBoundaries, Map<List<T>, T> boundariesToRes, T left, T right) {
    final List<T> lb = resToBoundaries.get(left);
    final List<T> rb = resToBoundaries.get(right);
    final T combinedStart = res[Math.min(lb.get(0).ordinal(), rb.get(0).ordinal())];
    final T combinedEnd =
            res[Math.max(lb.get(lb.size() - 1).ordinal(), rb.get(rb.size() - 1).ordinal())];
    if (combinedStart == combinedEnd) {
        return boundariesToRes.get(Collections.singletonList(combinedStart));
    }
    return boundariesToRes.get(Arrays.asList(combinedStart, combinedEnd));
}

private static int combinePrecision(LogicalType resultType, LogicalType type) {
    return Math.max(getPrecision(resultType), getPrecision(type));
}

private static int combineLength(LogicalType resultType, LogicalType right) {
    return Math.max(getLength(resultType), getLength(right));
}

/** A list that creates a view of all children at the given position. */
private static class ChildTypeView extends AbstractList<LogicalType> {

    private final List<LogicalType> backingTypes;
    private final int position;

    ChildTypeView(List<LogicalType> types, int childPos) {
        this.backingTypes = types;
        this.position = childPos;
    }

    @Override
    public LogicalType get(int index) {
        return backingTypes.get(index).getChildren().get(position);
    }

    @Override
    public int size() {
        return backingTypes.size();
    }
}

private LogicalTypeMerging() {
    // no instantiation
}
}
apache/polygene-java
36,866
core/api/src/main/java/org/apache/polygene/api/query/QueryExpressions.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * */ package org.apache.polygene.api.query; import java.lang.reflect.Field; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import java.lang.reflect.Proxy; import java.lang.reflect.Type; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Objects; import java.util.function.Predicate; import org.apache.polygene.api.association.Association; import org.apache.polygene.api.association.GenericAssociationInfo; import org.apache.polygene.api.association.ManyAssociation; import org.apache.polygene.api.association.NamedAssociation; import org.apache.polygene.api.composite.Composite; import org.apache.polygene.api.identity.HasIdentity; import org.apache.polygene.api.identity.Identity; import org.apache.polygene.api.injection.scope.State; import org.apache.polygene.api.property.GenericPropertyInfo; import org.apache.polygene.api.property.Property; import org.apache.polygene.api.query.grammar.AndPredicate; import org.apache.polygene.api.query.grammar.AssociationFunction; import org.apache.polygene.api.query.grammar.AssociationNotNullPredicate; import 
org.apache.polygene.api.query.grammar.AssociationNullPredicate; import org.apache.polygene.api.query.grammar.ContainsAllPredicate; import org.apache.polygene.api.query.grammar.ContainsPredicate; import org.apache.polygene.api.query.grammar.EqPredicate; import org.apache.polygene.api.query.grammar.GePredicate; import org.apache.polygene.api.query.grammar.GtPredicate; import org.apache.polygene.api.query.grammar.LePredicate; import org.apache.polygene.api.query.grammar.LtPredicate; import org.apache.polygene.api.query.grammar.ManyAssociationContainsPredicate; import org.apache.polygene.api.query.grammar.ManyAssociationFunction; import org.apache.polygene.api.query.grammar.MatchesPredicate; import org.apache.polygene.api.query.grammar.NamedAssociationContainsNamePredicate; import org.apache.polygene.api.query.grammar.NamedAssociationContainsPredicate; import org.apache.polygene.api.query.grammar.NamedAssociationFunction; import org.apache.polygene.api.query.grammar.NePredicate; import org.apache.polygene.api.query.grammar.Notpredicate; import org.apache.polygene.api.query.grammar.OrPredicate; import org.apache.polygene.api.query.grammar.OrderBy; import org.apache.polygene.api.query.grammar.PropertyFunction; import org.apache.polygene.api.query.grammar.PropertyNotNullPredicate; import org.apache.polygene.api.query.grammar.PropertyNullPredicate; import org.apache.polygene.api.query.grammar.PropertyReference; import org.apache.polygene.api.query.grammar.Variable; import static org.apache.polygene.api.identity.HasIdentity.IDENTITY_METHOD; /** * Static factory methods for query expressions and operators. */ public final class QueryExpressions { // This is used for eq(Association,Composite) // Templates and variables -----------------------------------------------| /** * Create a Query Template using the given type. 
* * @param <T> the type of the template * @param clazz a class declaring the type of the template * * @return a new Query Template */ public static <T> T templateFor( Class<T> clazz ) { Objects.requireNonNull( clazz,"Template class" ); if( clazz.isInterface() ) { return clazz.cast( Proxy.newProxyInstance( clazz.getClassLoader(), array( clazz ), new TemplateHandler<T>( null, null, null, null ) ) ); } else { try { T mixin = clazz.newInstance(); for( Field field : clazz.getFields() ) { if( field.getAnnotation( State.class ) != null ) { if( field.getType().equals( Property.class ) ) { field.set( mixin, Proxy.newProxyInstance( field.getType().getClassLoader(), array( field.getType() ), new PropertyReferenceHandler<>( new PropertyFunction<T>( null, null, null, null, field ) ) ) ); } else if( field.getType().equals( Association.class ) ) { field.set( mixin, Proxy.newProxyInstance( field.getType().getClassLoader(), array( field.getType() ), new AssociationReferenceHandler<>( new AssociationFunction<T>( null, null, null, field ) ) ) ); } else if( field.getType().equals( ManyAssociation.class ) ) { field.set( mixin, Proxy.newProxyInstance( field.getType().getClassLoader(), array( field.getType() ), new ManyAssociationReferenceHandler<>( new ManyAssociationFunction<T>( null, null, null, field ) ) ) ); } else if( field.getType().equals( NamedAssociation.class ) ) { field.set( mixin, Proxy.newProxyInstance( field.getType().getClassLoader(), array( field.getType() ), new NamedAssociationReferenceHandler<>( new NamedAssociationFunction<T>( null, null, null, field ) ) ) ); } } } return mixin; } catch( IllegalAccessException | IllegalArgumentException | InstantiationException | SecurityException e ) { throw new IllegalArgumentException( "Cannot use class as template", e ); } } } /** * Create a Query Template using the given mixin class and association. 
* * @param <T> the type of the template * @param mixinType a class declaring the type of the template * @param association an association * * @return a new Query Template */ public static <T> T templateFor( final Class<T> mixinType, Association<?> association ) { Objects.requireNonNull( mixinType, "Mixin class" ); Objects.requireNonNull( association, "Association" ); return mixinType.cast( Proxy.newProxyInstance( mixinType.getClassLoader(), array( mixinType ), new TemplateHandler<T>( null, association( association ), null, null ) ) ); } public static <T> T oneOf( final ManyAssociation<T> association ) { Objects.requireNonNull( association, "association" ); return association.get( 0 ); } public static <T> T oneOf( final NamedAssociation<T> association ) { Objects.requireNonNull( association, "Association" ); Iterator<String> iterator = association.iterator(); return association.get( iterator.hasNext() ? iterator.next() : null ); } /** * Create a new Query Variable. * * @param name a name for the Variable * * @return a new Query Variable. */ public static Variable variable( String name ) { Objects.requireNonNull( name, "Variable name" ); return new Variable( name ); } /** * Create a new Query Template PropertyFunction. * * @param <T> type of the Property * @param property a Property * * @return a new Query Template PropertyFunction */ @SuppressWarnings( "unchecked" ) public static <T> PropertyFunction<T> property( Property<T> property ) { return ( (PropertyReferenceHandler<T>) Proxy.getInvocationHandler( property ) ).property(); } /** * Create a new Query Property instance. * * @param <T> type of the Property * @param mixinClass mixin of the Property * @param fieldName name of the Property field * * @return a new Query Property instance for the given mixin and property name. 
*/
@SuppressWarnings( "unchecked" )
public static <T> Property<T> property( Class<?> mixinClass, String fieldName )
{
    try
    {
        Field field = mixinClass.getField( fieldName );
        if( !Property.class.isAssignableFrom( field.getType() ) )
        {
            throw new IllegalArgumentException( "Field must be of type Property<?>" );
        }
        return (Property<T>) Proxy.newProxyInstance(
            mixinClass.getClassLoader(),
            array( field.getType() ),
            new PropertyReferenceHandler<>(
                new PropertyFunction<T>( null, null, null, null, field ) ) );
    }
    catch( NoSuchFieldException e )
    {
        // BUGFIX: chain the original exception as cause instead of dropping it
        throw new IllegalArgumentException(
            "No such field '" + fieldName + "' in mixin " + mixinClass.getName(), e );
    }
}

/**
 * Create a new Query Template AssociationFunction.
 *
 * @param <T> type of the Association
 * @param association an Association
 *
 * @return a new Query Template AssociationFunction
 */
@SuppressWarnings( "unchecked" )
public static <T> AssociationFunction<T> association( Association<T> association )
{
    return ( (AssociationReferenceHandler<T>) Proxy.getInvocationHandler( association ) ).association();
}

/**
 * Create a new Query Template ManyAssociationFunction.
 *
 * @param <T> type of the ManyAssociation
 * @param association a ManyAssociation
 *
 * @return a new Query Template ManyAssociationFunction
 */
@SuppressWarnings( "unchecked" )
public static <T> ManyAssociationFunction<T> manyAssociation( ManyAssociation<T> association )
{
    return ( (ManyAssociationReferenceHandler<T>) Proxy.getInvocationHandler( association ) ).manyAssociation();
}

/**
 * Create a new Query Template NamedAssociationFunction.
* * @param <T> type of the NamedAssociation * @param association a NamedAssociation * * @return a new Query Template NamedAssociationFunction */ @SuppressWarnings( "unchecked" ) public static <T> NamedAssociationFunction<T> namedAssociation( NamedAssociation<T> association ) { return ( (NamedAssociationReferenceHandler<T>) Proxy.getInvocationHandler( association ) ).namedAssociation(); } // And/Or/Not ------------------------------------------------------------| /** * Create a new AND specification. * * @param left first operand * @param right second operand * @param optionalRight optional operands * * @return a new AND specification */ @SafeVarargs public static AndPredicate and( Predicate<Composite> left, Predicate<Composite> right, Predicate<Composite>... optionalRight ) { List<Predicate<Composite>> predicates = new ArrayList<>( 2 + optionalRight.length ); predicates.add( left ); predicates.add( right ); Collections.addAll( predicates, optionalRight ); return new AndPredicate( predicates ); } /** * Create a new OR specification. * * @param specs operands * * @return a new OR specification */ @SafeVarargs public static OrPredicate or( Predicate<Composite>... specs ) { return new OrPredicate( Arrays.asList( specs ) ); } /** * Create a new NOT specification. * * @param operand specification to be negated * * @return a new NOT specification */ public static Notpredicate not( Predicate<Composite> operand ) { return new Notpredicate( operand ); } // Comparisons -----------------------------------------------------------| /** * Create a new EQUALS specification for a Property. * * @param <T> Property type * @param property a Property * @param value its value * * @return a new EQUALS specification for a Property. */ public static <T> EqPredicate<T> eq( Property<T> property, T value ) { return new EqPredicate<>( property( property ), value ); } /** * Create a new EQUALS specification for a Property using a named Variable. 
* * @param <T> Property type * @param property a Property * @param variable a Query Variable * * @return a new EQUALS specification for a Property using a named Variable. */ @SuppressWarnings( {"raw", "unchecked"} ) public static <T> EqPredicate<T> eq( Property<T> property, Variable variable ) { return new EqPredicate( property( property ), variable ); } /** * Create a new EQUALS specification for an Association. * * @param <T> Association type * @param association an Association * @param value its value * * @return a new EQUALS specification for an Association. */ public static <T> EqPredicate<Identity> eq( Association<T> association, T value ) { return new EqPredicate<>( new PropertyFunction<>( null, association(association), null, null, IDENTITY_METHOD), ((HasIdentity) value).identity().get()); } /** * Create a new GREATER OR EQUALS specification for a Property. * * @param <T> Property type * @param property a Property * @param value its value * * @return a new GREATER OR EQUALS specification for a Property. */ public static <T> GePredicate<T> ge( Property<T> property, T value ) { return new GePredicate<>( property( property ), value ); } /** * Create a new GREATER OR EQUALS specification for a Property using a named Variable. * * @param <T> Property type * @param property a Property * @param variable a Query Variable * * @return a new GREATER OR EQUALS specification for a Property using a named Variable. */ @SuppressWarnings( {"raw", "unchecked"} ) public static <T> GePredicate<T> ge( Property<T> property, Variable variable ) { return new GePredicate( property( property ), variable ); } /** * Create a new GREATER THAN specification for a Property. * * @param <T> Property type * @param property a Property * @param value its value * * @return a new GREATER THAN specification for a Property. 
*/ public static <T> GtPredicate<T> gt( Property<T> property, T value ) { return new GtPredicate<>( property( property ), value ); } /** * Create a new GREATER THAN specification for a Property using a named Variable. * * @param <T> Property type * @param property a Property * @param variable a Query Variable * * @return a new GREATER THAN specification for a Property using a named Variable. */ @SuppressWarnings( {"raw", "unchecked"} ) public static <T> GtPredicate<T> gt( Property<T> property, Variable variable ) { return new GtPredicate( property( property ), variable ); } /** * Create a new LESS OR EQUALS specification for a Property. * * @param <T> Property type * @param property a Property * @param value its value * * @return a new LESS OR EQUALS specification for a Property. */ public static <T> LePredicate<T> le( Property<T> property, T value ) { return new LePredicate<>( property( property ), value ); } /** * Create a new LESS OR EQUALS specification for a Property using a named Variable. * * @param <T> Property type * @param property a Property * @param variable a Query Variable * * @return a new LESS OR EQUALS specification for a Property using a named Variable. */ @SuppressWarnings( {"raw", "unchecked"} ) public static <T> LePredicate<T> le( Property<T> property, Variable variable ) { return new LePredicate( property( property ), variable ); } /** * Create a new LESSER THAN specification for a Property. * * @param <T> Property type * @param property a Property * @param value its value * * @return a new LESSER THAN specification for a Property. */ public static <T> LtPredicate<T> lt( Property<T> property, T value ) { return new LtPredicate<>( property( property ), value ); } /** * Create a new LESSER THAN specification for a Property using a named Variable. * * @param <T> Property type * @param property a Property * @param variable a Query Variable * * @return a new LESSER THAN specification for a Property using a named Variable. 
*/ @SuppressWarnings( {"raw", "unchecked"} ) public static <T> LtPredicate<T> lt( Property<T> property, Variable variable ) { return new LtPredicate( property( property ), variable ); } /** * Create a new NOT EQUALS specification for a Property. * * @param <T> Property type * @param property a Property * @param value its value * * @return a new NOT EQUALS specification for a Property. */ public static <T> NePredicate<T> ne( Property<T> property, T value ) { return new NePredicate<>( property( property ), value ); } /** * Create a new NOT EQUALS specification for a Property using a named Variable. * * @param <T> Property type * @param property a Property * @param variable a Query Variable * * @return a new NOT EQUALS specification for a Property using a named Variable. */ @SuppressWarnings( {"raw", "unchecked"} ) public static <T> NePredicate<T> ne( Property<T> property, Variable variable ) { return new NePredicate( property( property ), variable ); } /** * Create a new REGULAR EXPRESSION specification for a Property. * * @param property a Property * @param regexp its value * * @return a new REGULAR EXPRESSION specification for a Property. */ public static MatchesPredicate matches( Property<String> property, String regexp ) { return new MatchesPredicate( property( property ), regexp ); } /** * Create a new REGULAR EXPRESSION specification for a Property using a named Variable. * * @param property a Property * @param variable a Query Variable * * @return a new REGULAR EXPRESSION specification for a Property using a named Variable. */ public static MatchesPredicate matches( Property<String> property, Variable variable ) { return new MatchesPredicate( property( property ), variable ); } // Null checks -----------------------------------------------------------| /** * Create a new NOT NULL specification for a Property. * * @param <T> Property type * @param property a Property * * @return a new NOT NULL specification for a Property. 
*/ public static <T> PropertyNotNullPredicate<T> isNotNull( Property<T> property ) { return new PropertyNotNullPredicate<>( property( property ) ); } /** * Create a new NULL specification for a Property. * * @param <T> Property type * @param property a Property * * @return a new NULL specification for a Property. */ public static <T> PropertyNullPredicate<T> isNull( Property<T> property ) { return new PropertyNullPredicate<>( property( property ) ); } /** * Create a new NOT NULL specification for an Association. * * @param <T> Association type * @param association an Association * * @return a new NOT NULL specification for an Association. */ public static <T> AssociationNotNullPredicate<T> isNotNull( Association<T> association ) { return new AssociationNotNullPredicate<>( association( association ) ); } /** * Create a new NULL specification for an Association. * * @param <T> Association type * @param association an Association * * @return a new NULL specification for an Association. */ public static <T> AssociationNullPredicate<T> isNull( Association<T> association ) { return new AssociationNullPredicate<>( association( association ) ); } // Collections -----------------------------------------------------------| /** * Create a new CONTAINS ALL specification for a Collection Property. * * @param <T> Collection property type * @param collectionProperty a Collection Property * @param values its values * * @return a new CONTAINS ALL specification for a Collection Property. */ public static <T> ContainsAllPredicate<T> containsAll( Property<? extends Collection<T>> collectionProperty, Collection<T> values ) { Objects.requireNonNull( values, "Values" ); return new ContainsAllPredicate<>( property( collectionProperty ), values ); } /** * Create a new CONTAINS ALL specification for a Collection Property using named Variables. 
* * @param <T> Collection property type * @param collectionProperty a Collection Property * @param variables named Variables * * @return a new CONTAINS ALL specification for a Collection Property using named Variables. */ @SuppressWarnings( {"raw", "unchecked"} ) public static <T> ContainsAllPredicate<T> containsAllVariables( Property<? extends Collection<T>> collectionProperty, Collection<Variable> variables ) { Objects.requireNonNull( variables, "Variables" ); return new ContainsAllPredicate( property( collectionProperty ), variables ); } /** * Create a new CONTAINS specification for a Collection Property. * * @param <T> Collection property type * @param collectionProperty a Collection Property * @param value the value * * @return a new CONTAINS specification for a Collection Property. */ public static <T> ContainsPredicate<T> contains( Property<? extends Collection<T>> collectionProperty, T value ) { Objects.requireNonNull( value, "Value" ); return new ContainsPredicate<>( property( collectionProperty ), value ); } /** * Create a new CONTAINS specification for a Collection Property using named Variables. * * @param <T> Collection property type * @param collectionProperty a Collection Property * @param variable named Variable * * @return a new CONTAINS specification for a Collection Property using named Variables. */ @SuppressWarnings( {"raw", "unchecked"} ) public static <T> ContainsPredicate<T> contains( Property<? extends Collection<T>> collectionProperty, Variable variable ) { Objects.requireNonNull( variable, "Variable" ); return new ContainsPredicate( property( collectionProperty ), variable ); } /** * Create a new CONTAINS specification for a ManyAssociation. * * @param <T> ManyAssociation type * @param manyAssoc a ManyAssociation * @param value the value * * @return a new CONTAINS specification for a ManyAssociation. 
*/ public static <T> ManyAssociationContainsPredicate<T> contains( ManyAssociation<T> manyAssoc, T value ) { return new ManyAssociationContainsPredicate<>( manyAssociation( manyAssoc ), value ); } /** * Create a new CONTAINS specification for a NamedAssociation. * * @param <T> NamedAssociation type * @param namedAssoc a NamedAssociation * @param value the value * * @return a new CONTAINS specification for a NamedAssociation. */ public static <T> NamedAssociationContainsPredicate<T> contains( NamedAssociation<T> namedAssoc, T value ) { return new NamedAssociationContainsPredicate<>( namedAssociation( namedAssoc ), value ); } /** * Create a new CONTAINS NAME specification for a NamedAssociation. * * @param <T> NamedAssociation type * @param namedAssoc a NamedAssociation * @param name the name * * @return a new CONTAINS NAME specification for a NamedAssociation. */ public static <T> NamedAssociationContainsNamePredicate<T> containsName( NamedAssociation<T> namedAssoc, String name ) { return new NamedAssociationContainsNamePredicate<>( namedAssociation( namedAssoc ), name ); } // Ordering --------------------------------------------------------------| /** * Create a new Query ascending order segment for a Property. * * @param <T> type of the Property * @param property a Property * * @return a new Query ascending order segment for a Property. */ public static <T> OrderBy orderBy( final Property<T> property ) { return orderBy( property, OrderBy.Order.ASCENDING ); } /** * Create a new Query ordering segment for a Property. * * @param <T> type of the Property * @param property a Property * @param order ascending or descending * * @return a new Query ordering segment for a Property. 
*/ public static <T> OrderBy orderBy( final Property<T> property, final OrderBy.Order order ) { return new OrderBy( property( property ), order ); } // Query Templates InvocationHandlers ------------------------------------| private static class TemplateHandler<T> implements InvocationHandler { private final PropertyFunction<?> compositeProperty; private final AssociationFunction<?> compositeAssociation; private final ManyAssociationFunction<?> compositeManyAssociation; private final NamedAssociationFunction<?> compositeNamedAssociation; private TemplateHandler( PropertyFunction<?> compositeProperty, AssociationFunction<?> compositeAssociation, ManyAssociationFunction<?> compositeManyAssociation, NamedAssociationFunction<?> compositeNamedAssociation ) { this.compositeProperty = compositeProperty; this.compositeAssociation = compositeAssociation; this.compositeManyAssociation = compositeManyAssociation; this.compositeNamedAssociation = compositeNamedAssociation; } @Override public Object invoke( Object o, Method method, Object[] objects ) throws Throwable { if( Property.class.isAssignableFrom( method.getReturnType() ) ) { return Proxy.newProxyInstance( method.getReturnType().getClassLoader(), array( method.getReturnType() ), new PropertyReferenceHandler<>( new PropertyFunction<T>( compositeProperty, compositeAssociation, compositeManyAssociation, compositeNamedAssociation, method ) ) ); } else if( Association.class.isAssignableFrom( method.getReturnType() ) ) { return Proxy.newProxyInstance( method.getReturnType().getClassLoader(), array( method.getReturnType() ), new AssociationReferenceHandler<>( new AssociationFunction<T>( compositeAssociation, compositeManyAssociation, compositeNamedAssociation, method ) ) ); } else if( ManyAssociation.class.isAssignableFrom( method.getReturnType() ) ) { return Proxy.newProxyInstance( method.getReturnType().getClassLoader(), array( method.getReturnType() ), new ManyAssociationReferenceHandler<>( new ManyAssociationFunction<T>( 
compositeAssociation, compositeManyAssociation, compositeNamedAssociation, method ) ) ); } else if( NamedAssociation.class.isAssignableFrom( method.getReturnType() ) ) { return Proxy.newProxyInstance( method.getReturnType().getClassLoader(), array( method.getReturnType() ), new NamedAssociationReferenceHandler<>( new NamedAssociationFunction<T>( compositeAssociation, compositeManyAssociation, compositeNamedAssociation, method ) ) ); } return null; } } private static class PropertyReferenceHandler<T> implements InvocationHandler { private final PropertyFunction<T> property; private PropertyReferenceHandler( PropertyFunction<T> property ) { this.property = property; } private PropertyFunction<T> property() { return property; } @Override public Object invoke( Object o, final Method method, Object[] objects ) throws Throwable { if( method.equals( Property.class.getMethod( "get" ) ) ) { Type propertyType = GenericPropertyInfo.propertyTypeOf( property.accessor() ); if( propertyType.getClass().equals( Class.class ) ) { return Proxy.newProxyInstance( method.getDeclaringClass().getClassLoader(), array( (Class<?>) propertyType, PropertyReference.class ), new TemplateHandler<T>( property, null, null, null ) ); } } return null; } } private static class AssociationReferenceHandler<T> implements InvocationHandler { private final AssociationFunction<T> association; private AssociationReferenceHandler( AssociationFunction<T> association ) { this.association = association; } private AssociationFunction<T> association() { return association; } @Override public Object invoke( Object o, final Method method, Object[] objects ) throws Throwable { if( method.equals( Association.class.getMethod( "get" ) ) ) { Type associationType = GenericAssociationInfo.associationTypeOf( association.accessor() ); if( associationType.getClass().equals( Class.class ) ) { return Proxy.newProxyInstance( method.getDeclaringClass().getClassLoader(), array( (Class) associationType, PropertyReference.class ), 
new TemplateHandler<T>( null, association, null, null ) ); } } return null; } } private static class ManyAssociationReferenceHandler<T> implements InvocationHandler { private final ManyAssociationFunction<T> manyAssociation; private ManyAssociationReferenceHandler( ManyAssociationFunction<T> manyAssociation ) { this.manyAssociation = manyAssociation; } public ManyAssociationFunction<T> manyAssociation() { return manyAssociation; } @Override public Object invoke( Object o, final Method method, Object[] objects ) throws Throwable { if( method.equals( ManyAssociation.class.getMethod( "get", Integer.TYPE ) ) ) { Type manyAssociationType = GenericAssociationInfo.associationTypeOf( manyAssociation.accessor() ); if( manyAssociationType.getClass().equals( Class.class ) ) { return Proxy.newProxyInstance( method.getDeclaringClass().getClassLoader(), array( (Class) manyAssociationType, PropertyReference.class ), new TemplateHandler<T>( null, null, manyAssociation, null ) ); } } return null; } } private static class NamedAssociationReferenceHandler<T> implements InvocationHandler { private final NamedAssociationFunction<T> namedAssociation; private NamedAssociationReferenceHandler( NamedAssociationFunction<T> namedAssociation ) { this.namedAssociation = namedAssociation; } public NamedAssociationFunction<T> namedAssociation() { return namedAssociation; } @Override public Object invoke( Object o, final Method method, Object[] objects ) throws Throwable { if( method.equals( NamedAssociation.class.getMethod( "get", String.class ) ) ) { Type namedAssociationType = GenericAssociationInfo.associationTypeOf( namedAssociation.accessor() ); if( namedAssociationType.getClass().equals( Class.class ) ) { return Proxy.newProxyInstance( method.getDeclaringClass().getClassLoader(), array( (Class) namedAssociationType, PropertyReference.class ), new TemplateHandler<T>( null, null, null, namedAssociation ) ); } } return null; } } @SafeVarargs private static <T> T[] array( T... 
array ) { return array; } private QueryExpressions() { } }
googleapis/google-cloud-java
36,515
java-scheduler/google-cloud-scheduler/src/test/java/com/google/cloud/scheduler/v1beta1/CloudSchedulerClientHttpJsonTest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.scheduler.v1beta1; import static com.google.cloud.scheduler.v1beta1.CloudSchedulerClient.ListJobsPagedResponse; import static com.google.cloud.scheduler.v1beta1.CloudSchedulerClient.ListLocationsPagedResponse; import com.google.api.gax.core.NoCredentialsProvider; import com.google.api.gax.httpjson.GaxHttpJsonProperties; import com.google.api.gax.httpjson.testing.MockHttpService; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.ApiException; import com.google.api.gax.rpc.ApiExceptionFactory; import com.google.api.gax.rpc.InvalidArgumentException; import com.google.api.gax.rpc.StatusCode; import com.google.api.gax.rpc.testing.FakeStatusCode; import com.google.cloud.location.GetLocationRequest; import com.google.cloud.location.ListLocationsRequest; import com.google.cloud.location.ListLocationsResponse; import com.google.cloud.location.Location; import com.google.cloud.scheduler.v1beta1.stub.HttpJsonCloudSchedulerStub; import com.google.common.collect.Lists; import com.google.protobuf.Any; import com.google.protobuf.Duration; import com.google.protobuf.Empty; import com.google.protobuf.FieldMask; import com.google.protobuf.Timestamp; import com.google.rpc.Status; import java.io.IOException; import java.util.Arrays; import java.util.HashMap; import java.util.List; import javax.annotation.Generated; import org.junit.After; import 
org.junit.AfterClass; import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; @Generated("by gapic-generator-java") public class CloudSchedulerClientHttpJsonTest { private static MockHttpService mockService; private static CloudSchedulerClient client; @BeforeClass public static void startStaticServer() throws IOException { mockService = new MockHttpService( HttpJsonCloudSchedulerStub.getMethodDescriptors(), CloudSchedulerSettings.getDefaultEndpoint()); CloudSchedulerSettings settings = CloudSchedulerSettings.newHttpJsonBuilder() .setTransportChannelProvider( CloudSchedulerSettings.defaultHttpJsonTransportProviderBuilder() .setHttpTransport(mockService) .build()) .setCredentialsProvider(NoCredentialsProvider.create()) .build(); client = CloudSchedulerClient.create(settings); } @AfterClass public static void stopServer() { client.close(); } @Before public void setUp() {} @After public void tearDown() throws Exception { mockService.reset(); } @Test public void listJobsTest() throws Exception { Job responsesElement = Job.newBuilder().build(); ListJobsResponse expectedResponse = ListJobsResponse.newBuilder() .setNextPageToken("") .addAllJobs(Arrays.asList(responsesElement)) .build(); mockService.addResponse(expectedResponse); LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); ListJobsPagedResponse pagedListResponse = client.listJobs(parent); List<Job> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getJobsList().get(0), resources.get(0)); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public 
void listJobsExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); client.listJobs(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void listJobsTest2() throws Exception { Job responsesElement = Job.newBuilder().build(); ListJobsResponse expectedResponse = ListJobsResponse.newBuilder() .setNextPageToken("") .addAllJobs(Arrays.asList(responsesElement)) .build(); mockService.addResponse(expectedResponse); String parent = "projects/project-5833/locations/location-5833"; ListJobsPagedResponse pagedListResponse = client.listJobs(parent); List<Job> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getJobsList().get(0), resources.get(0)); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void listJobsExceptionTest2() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { String parent = "projects/project-5833/locations/location-5833"; client.listJobs(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void getJobTest() throws Exception { Job expectedResponse = Job.newBuilder() .setName(JobName.of("[PROJECT]", "[LOCATION]", "[JOB]").toString()) .setDescription("description-1724546052") .setSchedule("schedule-697920873") .setTimeZone("timeZone-2077180903") .setUserUpdateTime(Timestamp.newBuilder().build()) .setStatus(Status.newBuilder().build()) .setScheduleTime(Timestamp.newBuilder().build()) .setLastAttemptTime(Timestamp.newBuilder().build()) .setRetryConfig(RetryConfig.newBuilder().build()) .setAttemptDeadline(Duration.newBuilder().build()) .setLegacyAppEngineCron(true) .build(); mockService.addResponse(expectedResponse); JobName name = JobName.of("[PROJECT]", "[LOCATION]", "[JOB]"); Job actualResponse = client.getJob(name); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void getJobExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { JobName name = JobName.of("[PROJECT]", "[LOCATION]", "[JOB]"); client.getJob(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void getJobTest2() throws Exception { Job expectedResponse = Job.newBuilder() .setName(JobName.of("[PROJECT]", "[LOCATION]", "[JOB]").toString()) .setDescription("description-1724546052") .setSchedule("schedule-697920873") .setTimeZone("timeZone-2077180903") .setUserUpdateTime(Timestamp.newBuilder().build()) .setStatus(Status.newBuilder().build()) .setScheduleTime(Timestamp.newBuilder().build()) .setLastAttemptTime(Timestamp.newBuilder().build()) .setRetryConfig(RetryConfig.newBuilder().build()) .setAttemptDeadline(Duration.newBuilder().build()) .setLegacyAppEngineCron(true) .build(); mockService.addResponse(expectedResponse); String name = "projects/project-3306/locations/location-3306/jobs/job-3306"; Job actualResponse = client.getJob(name); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void getJobExceptionTest2() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { String name = "projects/project-3306/locations/location-3306/jobs/job-3306"; client.getJob(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void createJobTest() throws Exception { Job expectedResponse = Job.newBuilder() .setName(JobName.of("[PROJECT]", "[LOCATION]", "[JOB]").toString()) .setDescription("description-1724546052") .setSchedule("schedule-697920873") .setTimeZone("timeZone-2077180903") .setUserUpdateTime(Timestamp.newBuilder().build()) .setStatus(Status.newBuilder().build()) .setScheduleTime(Timestamp.newBuilder().build()) .setLastAttemptTime(Timestamp.newBuilder().build()) .setRetryConfig(RetryConfig.newBuilder().build()) .setAttemptDeadline(Duration.newBuilder().build()) .setLegacyAppEngineCron(true) .build(); mockService.addResponse(expectedResponse); LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); Job job = Job.newBuilder().build(); Job actualResponse = client.createJob(parent, job); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void createJobExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); Job job = Job.newBuilder().build(); client.createJob(parent, job); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void createJobTest2() throws Exception { Job expectedResponse = Job.newBuilder() .setName(JobName.of("[PROJECT]", "[LOCATION]", "[JOB]").toString()) .setDescription("description-1724546052") .setSchedule("schedule-697920873") .setTimeZone("timeZone-2077180903") .setUserUpdateTime(Timestamp.newBuilder().build()) .setStatus(Status.newBuilder().build()) .setScheduleTime(Timestamp.newBuilder().build()) .setLastAttemptTime(Timestamp.newBuilder().build()) .setRetryConfig(RetryConfig.newBuilder().build()) .setAttemptDeadline(Duration.newBuilder().build()) .setLegacyAppEngineCron(true) .build(); mockService.addResponse(expectedResponse); String parent = "projects/project-5833/locations/location-5833"; Job job = Job.newBuilder().build(); Job actualResponse = client.createJob(parent, job); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void createJobExceptionTest2() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { String parent = "projects/project-5833/locations/location-5833"; Job job = Job.newBuilder().build(); client.createJob(parent, job); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void updateJobTest() throws Exception { Job expectedResponse = Job.newBuilder() .setName(JobName.of("[PROJECT]", "[LOCATION]", "[JOB]").toString()) .setDescription("description-1724546052") .setSchedule("schedule-697920873") .setTimeZone("timeZone-2077180903") .setUserUpdateTime(Timestamp.newBuilder().build()) .setStatus(Status.newBuilder().build()) .setScheduleTime(Timestamp.newBuilder().build()) .setLastAttemptTime(Timestamp.newBuilder().build()) .setRetryConfig(RetryConfig.newBuilder().build()) .setAttemptDeadline(Duration.newBuilder().build()) .setLegacyAppEngineCron(true) .build(); mockService.addResponse(expectedResponse); Job job = Job.newBuilder() .setName(JobName.of("[PROJECT]", "[LOCATION]", "[JOB]").toString()) .setDescription("description-1724546052") .setSchedule("schedule-697920873") .setTimeZone("timeZone-2077180903") .setUserUpdateTime(Timestamp.newBuilder().build()) .setStatus(Status.newBuilder().build()) .setScheduleTime(Timestamp.newBuilder().build()) .setLastAttemptTime(Timestamp.newBuilder().build()) .setRetryConfig(RetryConfig.newBuilder().build()) .setAttemptDeadline(Duration.newBuilder().build()) .setLegacyAppEngineCron(true) .build(); FieldMask updateMask = FieldMask.newBuilder().build(); Job actualResponse = client.updateJob(job, updateMask); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void updateJobExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { Job job = 
Job.newBuilder() .setName(JobName.of("[PROJECT]", "[LOCATION]", "[JOB]").toString()) .setDescription("description-1724546052") .setSchedule("schedule-697920873") .setTimeZone("timeZone-2077180903") .setUserUpdateTime(Timestamp.newBuilder().build()) .setStatus(Status.newBuilder().build()) .setScheduleTime(Timestamp.newBuilder().build()) .setLastAttemptTime(Timestamp.newBuilder().build()) .setRetryConfig(RetryConfig.newBuilder().build()) .setAttemptDeadline(Duration.newBuilder().build()) .setLegacyAppEngineCron(true) .build(); FieldMask updateMask = FieldMask.newBuilder().build(); client.updateJob(job, updateMask); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void deleteJobTest() throws Exception { Empty expectedResponse = Empty.newBuilder().build(); mockService.addResponse(expectedResponse); JobName name = JobName.of("[PROJECT]", "[LOCATION]", "[JOB]"); client.deleteJob(name); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void deleteJobExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { JobName name = JobName.of("[PROJECT]", "[LOCATION]", "[JOB]"); client.deleteJob(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void deleteJobTest2() throws Exception { Empty expectedResponse = Empty.newBuilder().build(); mockService.addResponse(expectedResponse); String name = "projects/project-3306/locations/location-3306/jobs/job-3306"; client.deleteJob(name); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void deleteJobExceptionTest2() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { String name = "projects/project-3306/locations/location-3306/jobs/job-3306"; client.deleteJob(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void pauseJobTest() throws Exception { Job expectedResponse = Job.newBuilder() .setName(JobName.of("[PROJECT]", "[LOCATION]", "[JOB]").toString()) .setDescription("description-1724546052") .setSchedule("schedule-697920873") .setTimeZone("timeZone-2077180903") .setUserUpdateTime(Timestamp.newBuilder().build()) .setStatus(Status.newBuilder().build()) .setScheduleTime(Timestamp.newBuilder().build()) .setLastAttemptTime(Timestamp.newBuilder().build()) .setRetryConfig(RetryConfig.newBuilder().build()) .setAttemptDeadline(Duration.newBuilder().build()) .setLegacyAppEngineCron(true) .build(); mockService.addResponse(expectedResponse); JobName name = JobName.of("[PROJECT]", "[LOCATION]", "[JOB]"); Job actualResponse = client.pauseJob(name); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void pauseJobExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { JobName name = JobName.of("[PROJECT]", "[LOCATION]", "[JOB]"); client.pauseJob(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void pauseJobTest2() throws Exception { Job expectedResponse = Job.newBuilder() .setName(JobName.of("[PROJECT]", "[LOCATION]", "[JOB]").toString()) .setDescription("description-1724546052") .setSchedule("schedule-697920873") .setTimeZone("timeZone-2077180903") .setUserUpdateTime(Timestamp.newBuilder().build()) .setStatus(Status.newBuilder().build()) .setScheduleTime(Timestamp.newBuilder().build()) .setLastAttemptTime(Timestamp.newBuilder().build()) .setRetryConfig(RetryConfig.newBuilder().build()) .setAttemptDeadline(Duration.newBuilder().build()) .setLegacyAppEngineCron(true) .build(); mockService.addResponse(expectedResponse); String name = "projects/project-3306/locations/location-3306/jobs/job-3306"; Job actualResponse = client.pauseJob(name); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void pauseJobExceptionTest2() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { String name = "projects/project-3306/locations/location-3306/jobs/job-3306"; client.pauseJob(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void resumeJobTest() throws Exception { Job expectedResponse = Job.newBuilder() .setName(JobName.of("[PROJECT]", "[LOCATION]", "[JOB]").toString()) .setDescription("description-1724546052") .setSchedule("schedule-697920873") .setTimeZone("timeZone-2077180903") .setUserUpdateTime(Timestamp.newBuilder().build()) .setStatus(Status.newBuilder().build()) .setScheduleTime(Timestamp.newBuilder().build()) .setLastAttemptTime(Timestamp.newBuilder().build()) .setRetryConfig(RetryConfig.newBuilder().build()) .setAttemptDeadline(Duration.newBuilder().build()) .setLegacyAppEngineCron(true) .build(); mockService.addResponse(expectedResponse); JobName name = JobName.of("[PROJECT]", "[LOCATION]", "[JOB]"); Job actualResponse = client.resumeJob(name); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void resumeJobExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { JobName name = JobName.of("[PROJECT]", "[LOCATION]", "[JOB]"); client.resumeJob(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void resumeJobTest2() throws Exception { Job expectedResponse = Job.newBuilder() .setName(JobName.of("[PROJECT]", "[LOCATION]", "[JOB]").toString()) .setDescription("description-1724546052") .setSchedule("schedule-697920873") .setTimeZone("timeZone-2077180903") .setUserUpdateTime(Timestamp.newBuilder().build()) .setStatus(Status.newBuilder().build()) .setScheduleTime(Timestamp.newBuilder().build()) .setLastAttemptTime(Timestamp.newBuilder().build()) .setRetryConfig(RetryConfig.newBuilder().build()) .setAttemptDeadline(Duration.newBuilder().build()) .setLegacyAppEngineCron(true) .build(); mockService.addResponse(expectedResponse); String name = "projects/project-3306/locations/location-3306/jobs/job-3306"; Job actualResponse = client.resumeJob(name); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void resumeJobExceptionTest2() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { String name = "projects/project-3306/locations/location-3306/jobs/job-3306"; client.resumeJob(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void runJobTest() throws Exception { Job expectedResponse = Job.newBuilder() .setName(JobName.of("[PROJECT]", "[LOCATION]", "[JOB]").toString()) .setDescription("description-1724546052") .setSchedule("schedule-697920873") .setTimeZone("timeZone-2077180903") .setUserUpdateTime(Timestamp.newBuilder().build()) .setStatus(Status.newBuilder().build()) .setScheduleTime(Timestamp.newBuilder().build()) .setLastAttemptTime(Timestamp.newBuilder().build()) .setRetryConfig(RetryConfig.newBuilder().build()) .setAttemptDeadline(Duration.newBuilder().build()) .setLegacyAppEngineCron(true) .build(); mockService.addResponse(expectedResponse); JobName name = JobName.of("[PROJECT]", "[LOCATION]", "[JOB]"); Job actualResponse = client.runJob(name); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void runJobExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { JobName name = JobName.of("[PROJECT]", "[LOCATION]", "[JOB]"); client.runJob(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void runJobTest2() throws Exception { Job expectedResponse = Job.newBuilder() .setName(JobName.of("[PROJECT]", "[LOCATION]", "[JOB]").toString()) .setDescription("description-1724546052") .setSchedule("schedule-697920873") .setTimeZone("timeZone-2077180903") .setUserUpdateTime(Timestamp.newBuilder().build()) .setStatus(Status.newBuilder().build()) .setScheduleTime(Timestamp.newBuilder().build()) .setLastAttemptTime(Timestamp.newBuilder().build()) .setRetryConfig(RetryConfig.newBuilder().build()) .setAttemptDeadline(Duration.newBuilder().build()) .setLegacyAppEngineCron(true) .build(); mockService.addResponse(expectedResponse); String name = "projects/project-3306/locations/location-3306/jobs/job-3306"; Job actualResponse = client.runJob(name); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void runJobExceptionTest2() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { String name = "projects/project-3306/locations/location-3306/jobs/job-3306"; client.runJob(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void listLocationsTest() throws Exception { Location responsesElement = Location.newBuilder().build(); ListLocationsResponse expectedResponse = ListLocationsResponse.newBuilder() .setNextPageToken("") .addAllLocations(Arrays.asList(responsesElement)) .build(); mockService.addResponse(expectedResponse); ListLocationsRequest request = ListLocationsRequest.newBuilder() .setName("projects/project-3664") .setFilter("filter-1274492040") .setPageSize(883849137) .setPageToken("pageToken873572522") .build(); ListLocationsPagedResponse pagedListResponse = client.listLocations(request); List<Location> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getLocationsList().get(0), resources.get(0)); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void listLocationsExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { ListLocationsRequest request = ListLocationsRequest.newBuilder() .setName("projects/project-3664") .setFilter("filter-1274492040") .setPageSize(883849137) .setPageToken("pageToken873572522") .build(); client.listLocations(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void getLocationTest() throws Exception { Location expectedResponse = Location.newBuilder() .setName("name3373707") .setLocationId("locationId1541836720") .setDisplayName("displayName1714148973") .putAllLabels(new HashMap<String, String>()) .setMetadata(Any.newBuilder().build()) .build(); mockService.addResponse(expectedResponse); GetLocationRequest request = GetLocationRequest.newBuilder() .setName("projects/project-9062/locations/location-9062") .build(); Location actualResponse = client.getLocation(request); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void getLocationExceptionTest() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { GetLocationRequest request = GetLocationRequest.newBuilder() .setName("projects/project-9062/locations/location-9062") .build(); client.getLocation(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } }
apache/jackrabbit-oak
36,853
oak-core/src/test/java/org/apache/jackrabbit/oak/security/authorization/permission/PermissionHookTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.security.authorization.permission; import org.apache.jackrabbit.JcrConstants; import org.apache.jackrabbit.api.security.JackrabbitAccessControlList; import org.apache.jackrabbit.api.security.user.Group; import org.apache.jackrabbit.commons.jackrabbit.authorization.AccessControlUtils; import org.apache.jackrabbit.oak.AbstractSecurityTest; import org.apache.jackrabbit.oak.api.ContentSession; import org.apache.jackrabbit.oak.api.PropertyState; import org.apache.jackrabbit.oak.api.Root; import org.apache.jackrabbit.oak.api.Tree; import org.apache.jackrabbit.oak.api.Type; import org.apache.jackrabbit.oak.commons.PathUtils; import org.apache.jackrabbit.oak.commons.collections.IterableUtils; import org.apache.jackrabbit.oak.commons.collections.SetUtils; import org.apache.jackrabbit.oak.plugins.memory.MemoryChildNodeEntry; import org.apache.jackrabbit.oak.plugins.tree.RootProvider; import org.apache.jackrabbit.oak.plugins.tree.TreeProvider; import org.apache.jackrabbit.oak.plugins.tree.TreeUtil; import org.apache.jackrabbit.oak.security.authorization.ProviderCtx; import org.apache.jackrabbit.oak.security.authorization.monitor.AuthorizationMonitor; import 
org.apache.jackrabbit.oak.spi.commit.CommitInfo; import org.apache.jackrabbit.oak.spi.mount.MountInfoProvider; import org.apache.jackrabbit.oak.spi.mount.Mounts; import org.apache.jackrabbit.oak.spi.nodetype.NodeTypeConstants; import org.apache.jackrabbit.oak.spi.security.authorization.AuthorizationConfiguration; import org.apache.jackrabbit.oak.spi.security.authorization.accesscontrol.AccessControlConstants; import org.apache.jackrabbit.oak.spi.security.authorization.permission.PermissionConstants; import org.apache.jackrabbit.oak.spi.security.authorization.permission.PermissionProvider; import org.apache.jackrabbit.oak.spi.security.authorization.permission.Permissions; import org.apache.jackrabbit.oak.spi.security.authorization.restriction.RestrictionProvider; import org.apache.jackrabbit.oak.spi.security.principal.EveryonePrincipal; import org.apache.jackrabbit.oak.spi.security.privilege.JcrAllUtil; import org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeBitsProvider; import org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeConstants; import org.apache.jackrabbit.oak.spi.state.NodeState; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.junit.After; import org.junit.Before; import org.junit.Test; import javax.jcr.RepositoryException; import javax.jcr.security.AccessControlEntry; import javax.jcr.security.AccessControlManager; import java.security.Principal; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import static org.apache.jackrabbit.JcrConstants.JCR_MIXINTYPES; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; 
import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; /** * Testing the {@code PermissionHook} */ public class PermissionHookTest extends AbstractSecurityTest implements AccessControlConstants, PermissionConstants, PrivilegeConstants { protected String testPath = "/testPath"; protected String childPath = "/testPath/childNode"; protected Principal testPrincipal; protected List<Principal> principals = new ArrayList<>(); private AuthorizationMonitor monitor; @Override @Before public void before() throws Exception { super.before(); testPrincipal = getTestUser().getPrincipal(); Tree rootNode = root.getTree("/"); Tree testNode = TreeUtil.addChild(rootNode, "testPath", JcrConstants.NT_UNSTRUCTURED); TreeUtil.addChild(testNode, "childNode", JcrConstants.NT_UNSTRUCTURED); addACE(testPath, testPrincipal, JCR_ADD_CHILD_NODES); addACE(testPath, EveryonePrincipal.getInstance(), JCR_READ); root.commit(); PrivilegeBitsProvider bitsProvider = new PrivilegeBitsProvider(root); monitor = mock(AuthorizationMonitor.class); } @Override @After public void after() throws Exception { try { root.refresh(); Tree test = root.getTree(testPath); if (test.exists()) { test.remove(); } for (Principal principal : principals) { getUserManager(root).getAuthorizable(principal).remove(); } root.commit(); } finally { super.after(); } } private ProviderCtx mockProviderContext(@NotNull MountInfoProvider mip, @NotNull RootProvider rootProvider, @NotNull TreeProvider treeProvider) { ProviderCtx ctx = mock(ProviderCtx.class); when(ctx.getMountInfoProvider()).thenReturn(mip); when(ctx.getRootProvider()).thenReturn(rootProvider); when(ctx.getTreeProvider()).thenReturn(treeProvider); when(ctx.getMonitor()).thenReturn(monitor); return ctx; } private PermissionHook createPermissionHook(@NotNull String wspName) { 
return new PermissionHook(wspName, RestrictionProvider.EMPTY, mockProviderContext(Mounts.defaultMountInfoProvider(), getRootProvider(), getTreeProvider())); } private void addACE(@NotNull String path, @NotNull Principal principal, @NotNull String... privilegeNames) throws RepositoryException { AccessControlManager acMgr = getAccessControlManager(root); JackrabbitAccessControlList acl = AccessControlUtils.getAccessControlList(acMgr, path); acl.addAccessControlEntry(principal, privilegesFromNames(privilegeNames)); acMgr.setPolicy(path, acl); } private Tree getPrincipalRoot(@NotNull Principal principal) { return root.getTree(PERMISSIONS_STORE_PATH).getChild(adminSession.getWorkspaceName()).getChild(principal.getName()); } private Tree getEntry(@NotNull Principal principal, String accessControlledPath, long index) throws Exception { Tree principalRoot = getPrincipalRoot(principal); Tree parent = principalRoot.getChild(PermissionUtil.getEntryName(accessControlledPath)); Tree entry = parent.getChild(String.valueOf(index)); if (!entry.exists()) { throw new RepositoryException("no such entry"); } return entry; } private long cntEntries(Tree parent) { long cnt = parent.getChildrenCount(Long.MAX_VALUE); for (Tree child : parent.getChildren()) { cnt += cntEntries(child); } return cnt; } private void createPrincipals() throws Exception { if (principals.isEmpty()) { for (int i = 0; i < 10; i++) { Group gr = getUserManager(root).createGroup("testGroup" + i); principals.add(gr.getPrincipal()); } root.commit(); } } private static void assertIndex(int expected, Tree entry) { assertEquals(expected, Integer.parseInt(entry.getName())); } @NotNull private static Set<String> getAccessControlledPaths(@NotNull Tree principalTree) { Set<String> s = new HashSet<>(); for (Tree tree : principalTree.getChildren()) { String path = getAccessControlledPath(tree); if (path != null) { s.add(path); } for (Tree child : tree.getChildren()) { if (child.getName().startsWith("c")) { String childPath = 
getAccessControlledPath(child); if (childPath != null) { s.add(childPath); } } } } return s; } @Nullable private static String getAccessControlledPath(@NotNull Tree t) { PropertyState pathProp = t.getProperty(REP_ACCESS_CONTROLLED_PATH); return (pathProp == null) ? null : pathProp.getValue(Type.STRING); } private static void assertNumPermissionsProperty(long expectedValue, @NotNull Tree parent) { PropertyState p = parent.getProperty(REP_NUM_PERMISSIONS); assertNotNull(p); assertEquals(expectedValue, p.getValue(Type.LONG).longValue()); } @Test public void testModifyRestrictions() throws Exception { Tree testAce = root.getTree(testPath + "/rep:policy").getChildren().iterator().next(); assertEquals(testPrincipal.getName(), testAce.getProperty(REP_PRINCIPAL_NAME).getValue(Type.STRING)); // add a new restriction node through the OAK API instead of access control manager Tree restrictions = TreeUtil.addChild(testAce, REP_RESTRICTIONS, NT_REP_RESTRICTIONS); restrictions.setProperty(REP_GLOB, "*"); String restrictionsPath = restrictions.getPath(); root.commit(); Tree principalRoot = getPrincipalRoot(testPrincipal); assertEquals(2, cntEntries(principalRoot)); Tree parent = principalRoot.getChildren().iterator().next(); assertEquals("*", parent.getChildren().iterator().next().getProperty(REP_GLOB).getValue(Type.STRING)); // modify the restrictions node Tree restrictionsNode = root.getTree(restrictionsPath); restrictionsNode.setProperty(REP_GLOB, "/*/jcr:content/*"); root.commit(); principalRoot = getPrincipalRoot(testPrincipal); assertEquals(2, cntEntries(principalRoot)); parent = principalRoot.getChildren().iterator().next(); assertEquals("/*/jcr:content/*", parent.getChildren().iterator().next().getProperty(REP_GLOB).getValue(Type.STRING)); // remove the restriction again root.getTree(restrictionsPath).remove(); root.commit(); principalRoot = getPrincipalRoot(testPrincipal); assertEquals(2, cntEntries(principalRoot)); parent = principalRoot.getChildren().iterator().next(); 
assertNull(parent.getChildren().iterator().next().getProperty(REP_GLOB)); } @Test public void testReorderAce() throws Exception { Tree entry = getEntry(testPrincipal, testPath, 0); assertIndex(0, entry); Tree aclTree = root.getTree(testPath + "/rep:policy"); aclTree.getChildren().iterator().next().orderBefore(null); root.commit(); entry = getEntry(testPrincipal, testPath, 1); assertIndex(1, entry); } @Test public void testReorderAndAddAce() throws Exception { Tree entry = getEntry(testPrincipal, testPath, 0); assertIndex(0, entry); Tree aclTree = root.getTree(testPath + "/rep:policy"); // reorder aclTree.getChildren().iterator().next().orderBefore(null); // add a new entry Tree ace = TreeUtil.addChild(aclTree, "denyEveryoneLockMgt", NT_REP_DENY_ACE); ace.setProperty(REP_PRINCIPAL_NAME, EveryonePrincipal.NAME); ace.setProperty(AccessControlConstants.REP_PRIVILEGES, Collections.singleton(JCR_LOCK_MANAGEMENT), Type.NAMES); root.commit(); entry = getEntry(testPrincipal, testPath, 1); assertIndex(1, entry); } @Test public void testReorderAddAndRemoveAces() throws Exception { Tree entry = getEntry(testPrincipal, testPath, 0); assertIndex(0, entry); Tree aclTree = root.getTree(testPath + "/rep:policy"); // reorder testPrincipal entry to the end aclTree.getChildren().iterator().next().orderBefore(null); Iterator<Tree> aceIt = aclTree.getChildren().iterator(); // remove the everyone entry aceIt.next().remove(); // remember the name of the testPrincipal entry. 
String name = aceIt.next().getName(); // add a new entry Tree ace = TreeUtil.addChild(aclTree, "denyEveryoneLockMgt", NT_REP_DENY_ACE); ace.setProperty(REP_PRINCIPAL_NAME, EveryonePrincipal.NAME); ace.setProperty(AccessControlConstants.REP_PRIVILEGES, Collections.singleton(JCR_LOCK_MANAGEMENT), Type.NAMES); // reorder the new entry before the remaining existing entry ace.orderBefore(name); root.commit(); entry = getEntry(testPrincipal, testPath, 1); assertIndex(1, entry); } /** * ACE : 0 1 2 3 4 5 6 7 * Before : tp ev p0 p1 p2 p3 * After : ev p2 p1 p3 p4 p5 */ @Test public void testReorderAddAndRemoveAces2() throws Exception { createPrincipals(); AccessControlManager acMgr = getAccessControlManager(root); JackrabbitAccessControlList acl = AccessControlUtils.getAccessControlList(acMgr, testPath); for (int i = 0; i < 4; i++) { acl.addAccessControlEntry(principals.get(i), privilegesFromNames(JCR_READ)); } acMgr.setPolicy(testPath, acl); root.commit(); AccessControlEntry[] aces = acl.getAccessControlEntries(); acl.removeAccessControlEntry(aces[0]); acl.removeAccessControlEntry(aces[2]); acl.orderBefore(aces[4], aces[3]); acl.addAccessControlEntry(principals.get(4), privilegesFromNames(JCR_READ)); acl.addAccessControlEntry(principals.get(5), privilegesFromNames(JCR_READ)); acMgr.setPolicy(testPath, acl); root.commit(); Tree entry = getEntry(principals.get(2), testPath, 1); assertIndex(1, entry); entry = getEntry(principals.get(1), testPath, 2); assertIndex(2, entry); } /** * ACE : 0 1 2 3 4 5 6 7 * Before : tp ev p0 p1 p2 p3 * After : p1 ev p3 p2 */ @Test public void testReorderAndRemoveAces() throws Exception { createPrincipals(); AccessControlManager acMgr = getAccessControlManager(root); JackrabbitAccessControlList acl = AccessControlUtils.getAccessControlList(acMgr, testPath); for (int i = 0; i < 4; i++) { acl.addAccessControlEntry(principals.get(i), privilegesFromNames(JCR_READ)); } acMgr.setPolicy(testPath, acl); root.commit(); AccessControlEntry[] aces = 
acl.getAccessControlEntries(); acl.removeAccessControlEntry(aces[0]); acl.removeAccessControlEntry(aces[2]); acl.orderBefore(aces[4], null); acl.orderBefore(aces[3], aces[1]); acMgr.setPolicy(testPath, acl); root.commit(); Tree entry = getEntry(EveryonePrincipal.getInstance(), testPath, 1); assertIndex(1, entry); entry = getEntry(principals.get(2), testPath, 3); assertIndex(3, entry); for (Principal p : new Principal[]{testPrincipal, principals.get(0)}) { try { getEntry(p, testPath, 0); fail(); } catch (RepositoryException e) { // success } } } @Test public void testImplicitAceRemoval() throws Exception { AccessControlManager acMgr = getAccessControlManager(root); JackrabbitAccessControlList acl = AccessControlUtils.getAccessControlList(acMgr, testPath); acl.addAccessControlEntry(testPrincipal, privilegesFromNames(JCR_READ, REP_WRITE)); acMgr.setPolicy(testPath, acl); acl = AccessControlUtils.getAccessControlList(acMgr, childPath); acl.addAccessControlEntry(EveryonePrincipal.getInstance(), privilegesFromNames(JCR_READ)); acMgr.setPolicy(childPath, acl); root.commit(); assertTrue(root.getTree(childPath + "/rep:policy").exists()); Tree principalRoot = getPrincipalRoot(EveryonePrincipal.getInstance()); assertEquals(4, cntEntries(principalRoot)); ContentSession testSession = createTestSession(); Root testRoot = testSession.getLatestRoot(); assertTrue(testRoot.getTree(childPath).exists()); assertFalse(testRoot.getTree(childPath + "/rep:policy").exists()); testRoot.getTree(childPath).remove(); testRoot.commit(); testSession.close(); root.refresh(); assertFalse(root.getTree(testPath).hasChild("childNode")); assertFalse(root.getTree(childPath + "/rep:policy").exists()); // aces must be removed in the permission store even if the editing // session wasn't able to access them. 
principalRoot = getPrincipalRoot(EveryonePrincipal.getInstance()); assertEquals(2, cntEntries(principalRoot)); } /** * @see <a href="https://issues.apache.org/jira/browse/OAK-2015">OAK-2015</a> */ @Test public void testDynamicJcrAll() throws Exception { AccessControlManager acMgr = getAccessControlManager(root); // grant 'everyone' jcr:all at the child path. JackrabbitAccessControlList acl = AccessControlUtils.getAccessControlList(acMgr, childPath); acl.addAccessControlEntry(EveryonePrincipal.getInstance(), privilegesFromNames(JCR_ALL)); acMgr.setPolicy(childPath, acl); root.commit(); // verify that the permission store contains an entry for everyone at childPath // and the privilegeBits for jcr:all are reflect with a placeholder value. Tree allEntry = getEntry(EveryonePrincipal.getInstance(), childPath, 0); assertTrue(allEntry.exists()); PropertyState ps = allEntry.getProperty(PermissionConstants.REP_PRIVILEGE_BITS); assertEquals(1, ps.count()); assertTrue(JcrAllUtil.denotesDynamicJcrAll(ps)); // verify that the permission provider still exposes the correct privilege // (jcr:all) for the given childPath irrespective of the dynamic nature of // the privilege bits in the persisted permission entry. 
Set<Principal> principalSet = Set.of(EveryonePrincipal.getInstance()); PermissionProvider permissionProvider = getConfig(AuthorizationConfiguration.class).getPermissionProvider(root, root.getContentSession().getWorkspaceName(), principalSet); Tree childTree = root.getTree(childPath); assertTrue(permissionProvider.hasPrivileges(childTree, PrivilegeConstants.JCR_ALL)); assertTrue(permissionProvider.getPrivileges(childTree).contains(PrivilegeConstants.JCR_ALL)); // also verify the permission evaluation long diff = Permissions.diff(Permissions.ALL, Permissions.REMOVE_NODE|Permissions.ADD_NODE); assertFalse(permissionProvider.isGranted(childTree, null, Permissions.REMOVE_NODE)); assertFalse(permissionProvider.isGranted(childTree, null, Permissions.ADD_NODE)); assertTrue(permissionProvider.isGranted(childTree, null, diff)); // remove the ACE again acl = AccessControlUtils.getAccessControlList(acMgr, childPath); for (AccessControlEntry ace : acl.getAccessControlEntries()) { if (EveryonePrincipal.NAME.equals(ace.getPrincipal().getName())) { acl.removeAccessControlEntry(ace); } } acMgr.setPolicy(childPath, acl); root.commit(); // verify that the corresponding permission entry has been removed. 
Tree everyoneRoot = getPrincipalRoot(EveryonePrincipal.getInstance()); Tree parent = everyoneRoot.getChild(PermissionUtil.getEntryName(childPath)); if (parent.exists()) { assertFalse(parent.getChild("0").exists()); } } @Test public void testNumPermissionsProperty() throws Exception { Tree everyoneRoot = getPrincipalRoot(EveryonePrincipal.getInstance()); Tree testRoot = getPrincipalRoot(testPrincipal); // initial state after setup assertNumPermissionsProperty(1, everyoneRoot); assertNumPermissionsProperty(1, testRoot); // add another acl with an entry for everyone addACE(childPath, EveryonePrincipal.getInstance(), JCR_READ); root.commit(); assertNumPermissionsProperty(2, everyoneRoot); assertNumPermissionsProperty(1, testRoot); // adding another ACE at an existing ACL must not change num-permissions AccessControlManager acMgr = getAccessControlManager(root); JackrabbitAccessControlList acl = AccessControlUtils.getAccessControlList(acMgr, childPath); acl = AccessControlUtils.getAccessControlList(acMgr, childPath); acl.addEntry(EveryonePrincipal.getInstance(), privilegesFromNames(JCR_READ), false, Map.of(REP_GLOB, getValueFactory(root).createValue("/*/jcr:content"))); acMgr.setPolicy(childPath, acl); root.commit(); assertNumPermissionsProperty(2, everyoneRoot); assertNumPermissionsProperty(1, testRoot); // remove policy at 'testPath' acMgr.removePolicy(testPath, AccessControlUtils.getAccessControlList(acMgr, testPath)); root.commit(); assertNumPermissionsProperty(1, everyoneRoot); assertNumPermissionsProperty(0, testRoot); // remove all ACEs on the childPath policy -> same effect as policy removal on permission store acl = AccessControlUtils.getAccessControlList(acMgr, childPath); for (AccessControlEntry entry : acl.getAccessControlEntries()) { acl.removeAccessControlEntry(entry); } acMgr.setPolicy(childPath, acl); root.commit(); assertNumPermissionsProperty(0, everyoneRoot); assertNumPermissionsProperty(0, testRoot); } @Test public void testCollisions() throws 
Exception { Tree testRoot = getPrincipalRoot(testPrincipal); assertNumPermissionsProperty(1, testRoot); String aaPath = testPath + "/Aa"; String bbPath = testPath + "/BB"; if (aaPath.hashCode() == bbPath.hashCode()) { try { Tree parent = root.getTree(testPath); Tree aa = TreeUtil.addChild(parent, "Aa", JcrConstants.NT_UNSTRUCTURED); addACE(aa.getPath(), testPrincipal, JCR_READ); Tree bb = TreeUtil.addChild(parent, "BB", JcrConstants.NT_UNSTRUCTURED); addACE(bb.getPath(), testPrincipal, JCR_READ); root.commit(); assertEquals(2, testRoot.getChildrenCount(Long.MAX_VALUE)); assertNumPermissionsProperty(3, testRoot); Set<String> accessControlledPaths = Set.of(testPath, aa.getPath(), bb.getPath()); assertEquals(accessControlledPaths, getAccessControlledPaths(testRoot)); } finally { root.getTree(aaPath).remove(); root.getTree(bbPath).remove(); root.commit(); } } else { fail(); } } @Test public void testCollisionRemoval() throws Exception { Tree testRoot = getPrincipalRoot(testPrincipal); assertNumPermissionsProperty(1, testRoot); String aaPath = testPath + "/Aa"; String bbPath = testPath + "/BB"; if (aaPath.hashCode() == bbPath.hashCode()) { Tree parent = root.getTree(testPath); Tree aa = TreeUtil.addChild(parent, "Aa", JcrConstants.NT_UNSTRUCTURED); addACE(aa.getPath(), testPrincipal, JCR_READ); Tree bb = TreeUtil.addChild(parent, "BB", JcrConstants.NT_UNSTRUCTURED); addACE(bb.getPath(), testPrincipal, JCR_READ); root.commit(); root.getTree(aaPath).remove(); root.commit(); assertEquals(2, testRoot.getChildrenCount(Long.MAX_VALUE)); assertTrue(testRoot.hasChild(bbPath.hashCode() + "")); assertEquals(Set.of(testPath, bb.getPath()), getAccessControlledPaths(testRoot)); assertNumPermissionsProperty(2, testRoot); } } @Test public void testCollisionRemoval2() throws Exception { Tree testRoot = getPrincipalRoot(testPrincipal); assertNumPermissionsProperty(1, testRoot); String aaPath = testPath + "/Aa"; String bbPath = testPath + "/BB"; if (aaPath.hashCode() == 
bbPath.hashCode()) { Tree parent = root.getTree(testPath); Tree aa = TreeUtil.addChild(parent, "Aa", JcrConstants.NT_UNSTRUCTURED); addACE(aa.getPath(), testPrincipal, JCR_READ); Tree bb = TreeUtil.addChild(parent, "BB", JcrConstants.NT_UNSTRUCTURED); addACE(bb.getPath(), testPrincipal, JCR_READ); root.commit(); root.getTree(bbPath).remove(); root.commit(); assertEquals(2, testRoot.getChildrenCount(Long.MAX_VALUE)); assertTrue(testRoot.hasChild(aaPath.hashCode() + "")); assertEquals(Set.of(testPath, aa.getPath()), getAccessControlledPaths(testRoot)); assertNumPermissionsProperty(2, testRoot); } } @Test public void testCollisionRemoval3() throws Exception { Tree testRoot = getPrincipalRoot(testPrincipal); assertNumPermissionsProperty(1, testRoot); String aaPath = testPath + "/Aa"; String bbPath = testPath + "/BB"; if (aaPath.hashCode() == bbPath.hashCode()) { Tree parent = root.getTree(testPath); Tree aa = TreeUtil.addChild(parent, "Aa", JcrConstants.NT_UNSTRUCTURED); addACE(aa.getPath(), testPrincipal, JCR_READ); Tree bb = TreeUtil.addChild(parent, "BB", JcrConstants.NT_UNSTRUCTURED); addACE(bb.getPath(), testPrincipal, JCR_READ); root.commit(); root.getTree(aaPath).remove(); root.getTree(bbPath).remove(); root.commit(); assertEquals(1, testRoot.getChildrenCount(Long.MAX_VALUE)); assertFalse(testRoot.hasChild(aaPath.hashCode() + "")); assertFalse(testRoot.hasChild(bbPath.hashCode() + "")); assertEquals(Set.of(testPath), getAccessControlledPaths(testRoot)); assertNumPermissionsProperty(1, testRoot); } } @Test public void testCollisionRemoval4() throws Exception { Tree testRoot = getPrincipalRoot(testPrincipal); String aPath = testPath + "/AaAa"; String bPath = testPath + "/BBBB"; String cPath = testPath + "/AaBB"; if (aPath.hashCode() == bPath.hashCode() && bPath.hashCode() == cPath.hashCode()) { String name = aPath.hashCode() + ""; Tree parent = root.getTree(testPath); Tree aa = TreeUtil.addChild(parent, "AaAa", JcrConstants.NT_UNSTRUCTURED); addACE(aa.getPath(), 
testPrincipal, JCR_READ); Tree bb = TreeUtil.addChild(parent, "BBBB", JcrConstants.NT_UNSTRUCTURED); addACE(bb.getPath(), testPrincipal, JCR_READ); Tree cc = TreeUtil.addChild(parent, "AaBB", JcrConstants.NT_UNSTRUCTURED); addACE(cc.getPath(), testPrincipal, JCR_READ); root.commit(); Set<String> paths = SetUtils.toSet(aPath, bPath, cPath); paths.add(testPath); assertEquals(2, testRoot.getChildrenCount(Long.MAX_VALUE)); assertEquals(paths, getAccessControlledPaths(testRoot)); assertNumPermissionsProperty(paths.size(), testRoot); String toRemove = null; for (String path : paths) { if (testRoot.hasChild(name) && path.equals(getAccessControlledPath(testRoot.getChild(name)))) { toRemove = path; break; } } assertNotNull(toRemove); paths.remove(toRemove); root.getTree(toRemove).remove(); root.commit(); assertEquals(2, testRoot.getChildrenCount(Long.MAX_VALUE)); assertTrue(testRoot.hasChild(toRemove.hashCode() + "")); assertNotEquals(toRemove, getAccessControlledPath(testRoot.getChild(name))); assertEquals(paths, getAccessControlledPaths(testRoot)); assertNumPermissionsProperty(paths.size(), testRoot); } } @Test public void testCollisionRemovalSubsequentAdd() throws Exception { Tree testRoot = getPrincipalRoot(testPrincipal); String aPath = testPath + "/AaAa"; String bPath = testPath + "/BBBB"; String cPath = testPath + "/AaBB"; String dPath = testPath + "/BBAa"; if (aPath.hashCode() == bPath.hashCode() && bPath.hashCode() == cPath.hashCode() && cPath.hashCode() == dPath.hashCode()) { String name = aPath.hashCode() + ""; Tree parent = root.getTree(testPath); Tree aa = TreeUtil.addChild(parent, "AaAa", JcrConstants.NT_UNSTRUCTURED); addACE(aa.getPath(), testPrincipal, JCR_READ); Tree bb = TreeUtil.addChild(parent, "BBBB", JcrConstants.NT_UNSTRUCTURED); addACE(bb.getPath(), testPrincipal, JCR_READ); Tree cc = TreeUtil.addChild(parent, "AaBB", JcrConstants.NT_UNSTRUCTURED); addACE(cc.getPath(), testPrincipal, JCR_READ); root.commit(); Set<String> paths = SetUtils.toSet(aPath, 
bPath, cPath); paths.add(testPath); assertEquals(2, testRoot.getChildrenCount(Long.MAX_VALUE)); assertEquals(paths, getAccessControlledPaths(testRoot)); String toRemove = null; for (String path : paths) { if (testRoot.hasChild(name) && path.equals(getAccessControlledPath(testRoot.getChild(name)))) { toRemove = path; break; } } paths.remove(toRemove); root.getTree(toRemove).remove(); root.commit(); Tree dd = TreeUtil.addChild(parent, "BBAa", JcrConstants.NT_UNSTRUCTURED); addACE(dd.getPath(), testPrincipal, JCR_READ); root.commit(); assertEquals(2, testRoot.getChildrenCount(Long.MAX_VALUE)); paths.add(dPath); assertEquals(paths, getAccessControlledPaths(testRoot)); } else { fail(); } } @Test public void testPolicyNodeNoLongerOfTypeRepACL() throws Exception { AccessControlManager acMgr = getAccessControlManager(root); JackrabbitAccessControlList acl = AccessControlUtils.getAccessControlList(acMgr, testPath); acMgr.removePolicy(acl.getPath(), acl); Tree test = root.getTree(testPath); test.removeProperty(JCR_MIXINTYPES); TreeUtil.addChild(test, AccessControlConstants.REP_POLICY, NodeTypeConstants.NT_OAK_UNSTRUCTURED); Tree principalPermissionStore = root.getTree(PermissionConstants.PERMISSIONS_STORE_PATH).getChild(adminSession.getWorkspaceName()).getChild(testPrincipal.getName()); Tree permissionEntry = principalPermissionStore.getChildren().iterator().next(); assertTrue(permissionEntry.exists()); String path = permissionEntry.getPath(); root.commit(); permissionEntry = root.getTree(path); assertFalse(permissionEntry.exists()); } @Test public void testInvalidPolicyNodeBecomesTypeRepACL() throws Exception { Tree t = root.getTree(testPath).getChild("childNode"); TreeUtil.addChild(t, AccessControlConstants.REP_POLICY, NodeTypeConstants.NT_OAK_UNSTRUCTURED); root.commit(); Tree principalPermissionStore = root.getTree(PermissionConstants.PERMISSIONS_STORE_PATH).getChild(adminSession.getWorkspaceName()).getChild(testPrincipal.getName()); assertEquals(1, 
principalPermissionStore.getChildrenCount(10)); AccessControlManager acMgr = getAccessControlManager(root); t.getChild(REP_POLICY).remove(); JackrabbitAccessControlList acl = AccessControlUtils.getAccessControlList(acMgr, t.getPath()); acl.addAccessControlEntry(testPrincipal, privilegesFromNames(PrivilegeConstants.JCR_READ)); acMgr.setPolicy(acl.getPath(), acl); root.commit(); principalPermissionStore = root.getTree(PermissionConstants.PERMISSIONS_STORE_PATH).getChild(adminSession.getWorkspaceName()).getChild(testPrincipal.getName()); assertEquals(2, principalPermissionStore.getChildrenCount(10)); Iterable<String> paths = IterableUtils.transform(principalPermissionStore.getChildren(), tree -> tree.getProperty(REP_ACCESS_CONTROLLED_PATH).getValue(Type.STRING)); assertEquals(Set.of(testPath, t.getPath()), SetUtils.toSet(paths)); } @Test public void testToString() { PermissionHook h1 = createPermissionHook("wspName"); PermissionHook h2 = new PermissionHook("default", mock(RestrictionProvider.class), mockProviderContext(mock(MountInfoProvider.class), mock(RootProvider.class), mock(TreeProvider.class))); assertEquals(h1.toString(), h2.toString()); } @Test public void testHiddenChildNodeAdded() throws Exception { PermissionHook ph = createPermissionHook(adminSession.getWorkspaceName()); NodeState before = getTreeProvider().asNodeState(root.getTree(PathUtils.ROOT_PATH)); NodeState after = spy(before); NodeState child = mock(NodeState.class); Iterable newCnes = Collections.singleton(new MemoryChildNodeEntry(":hidden", child)); Iterable cnes = IterableUtils.chainedIterable(newCnes, before.getChildNodeEntries()); when(after.getChildNodeEntries()).thenReturn(cnes); when(after.getChildNode(":hidden")).thenReturn(child); ph.processCommit(before, after, new CommitInfo("sid", null)); verify(child, never()).getProperty(anyString()); } @Test public void testHiddenChildNodeChanged() { PermissionHook ph = createPermissionHook(adminSession.getWorkspaceName()); NodeState nodeState = 
getTreeProvider().asNodeState(root.getTree(PathUtils.ROOT_PATH)); NodeState after = spy(nodeState); NodeState before = spy(nodeState); NodeState child = mock(NodeState.class); Iterable hidden = Collections.singleton(new MemoryChildNodeEntry(":hidden", child)); Iterable cnes = IterableUtils.chainedIterable(hidden, nodeState.getChildNodeEntries()); when(before.getChildNodeEntries()).thenReturn(cnes); when(before.getChildNode(":hidden")).thenReturn(child); NodeState child2 = when(mock(NodeState.class).exists()).thenReturn(true).getMock(); hidden = Collections.singleton(new MemoryChildNodeEntry(":hidden", child2)); cnes = IterableUtils.chainedIterable(hidden, nodeState.getChildNodeEntries()); when(after.getChildNodeEntries()).thenReturn(cnes); when(after.getChildNode(":hidden")).thenReturn(child2); ph.processCommit(before, after, new CommitInfo("sid", null)); verify(child, never()).getProperty(anyString()); verify(child2, never()).getProperty(anyString()); } @Test public void testHiddenChildNodeDeleted() { PermissionHook ph = createPermissionHook(adminSession.getWorkspaceName()); NodeState after = getTreeProvider().asNodeState(root.getTree(PathUtils.ROOT_PATH)); NodeState before = spy(after); NodeState child = mock(NodeState.class); Iterable deletedCnes = Collections.singleton(new MemoryChildNodeEntry(":hidden", child)); Iterable cnes = IterableUtils.chainedIterable(deletedCnes, after.getChildNodeEntries()); when(before.getChildNodeEntries()).thenReturn(cnes); when(before.getChildNode(":hidden")).thenReturn(child); ph.processCommit(before, after, new CommitInfo("sid", null)); verify(child, never()).getProperty(anyString()); } }
apache/kafka
36,944
streams/src/test/java/org/apache/kafka/streams/tests/EosTestDriver.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.streams.tests; import org.apache.kafka.clients.admin.Admin; import org.apache.kafka.clients.admin.ConsumerGroupDescription; import org.apache.kafka.clients.admin.ListConsumerGroupOffsetsResult; import org.apache.kafka.clients.admin.StreamsGroupDescription; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.ConsumerRecords; import org.apache.kafka.clients.consumer.KafkaConsumer; import org.apache.kafka.clients.consumer.OffsetAndMetadata; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.common.IsolationLevel; import org.apache.kafka.common.PartitionInfo; import org.apache.kafka.common.TopicPartition; import org.apache.kafka.common.errors.TimeoutException; import org.apache.kafka.common.serialization.ByteArrayDeserializer; import org.apache.kafka.common.serialization.IntegerDeserializer; import org.apache.kafka.common.serialization.IntegerSerializer; import org.apache.kafka.common.serialization.LongDeserializer; import 
org.apache.kafka.common.serialization.StringDeserializer; import org.apache.kafka.common.serialization.StringSerializer; import org.apache.kafka.common.utils.Exit; import org.apache.kafka.common.utils.Utils; import java.time.Duration; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Properties; import java.util.Random; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; public class EosTestDriver extends SmokeTestUtil { private static final int MAX_NUMBER_OF_KEYS = 20000; private static final long MAX_IDLE_TIME_MS = 600000L; private static volatile boolean isRunning = true; private static final CountDownLatch TERMINATED = new CountDownLatch(1); private static int numRecordsProduced = 0; private static synchronized void updateNumRecordsProduces(final int delta) { numRecordsProduced += delta; } static void generate(final String kafka) { Exit.addShutdownHook("streams-eos-test-driver-shutdown-hook", () -> { System.out.println("Terminating"); isRunning = false; try { if (TERMINATED.await(5L, TimeUnit.MINUTES)) { System.out.println("Terminated"); } else { System.out.println("Terminated with timeout"); } } catch (final InterruptedException swallow) { swallow.printStackTrace(System.err); System.out.println("Terminated with error"); } System.err.flush(); System.out.flush(); }); final Properties producerProps = new Properties(); producerProps.put(ProducerConfig.CLIENT_ID_CONFIG, "EosTest"); producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafka); producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class); producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class); producerProps.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, true); final Map<Integer, 
List<Long>> offsets = new HashMap<>(); try { try (final KafkaProducer<String, Integer> producer = new KafkaProducer<>(producerProps)) { final Random rand = new Random(System.currentTimeMillis()); while (isRunning) { final String key = "" + rand.nextInt(MAX_NUMBER_OF_KEYS); final int value = rand.nextInt(10000); final ProducerRecord<String, Integer> record = new ProducerRecord<>("data", key, value); producer.send(record, (metadata, exception) -> { if (exception != null) { exception.printStackTrace(System.err); System.err.flush(); if (exception instanceof TimeoutException) { try { // message == org.apache.kafka.common.errors.TimeoutException: Expiring 4 record(s) for data-0: 30004 ms has passed since last attempt plus backoff time final int expired = Integer.parseInt(exception.getMessage().split(" ")[2]); updateNumRecordsProduces(-expired); } catch (final Exception ignore) { } } } else { offsets.getOrDefault(metadata.partition(), new LinkedList<>()).add(metadata.offset()); } }); updateNumRecordsProduces(1); if (numRecordsProduced % 1000 == 0) { System.out.println(numRecordsProduced + " records produced"); System.out.flush(); } Utils.sleep(rand.nextInt(10)); } } System.out.println("Producer closed: " + numRecordsProduced + " records produced"); System.out.flush(); // verify offsets for (final Map.Entry<Integer, List<Long>> offsetsOfPartition : offsets.entrySet()) { offsetsOfPartition.getValue().sort(Long::compareTo); for (int i = 0; i < offsetsOfPartition.getValue().size() - 1; ++i) { if (offsetsOfPartition.getValue().get(i) != i) { System.err.println("Offset for partition " + offsetsOfPartition.getKey() + " is not " + i + " as expected but " + offsetsOfPartition.getValue().get(i)); System.err.flush(); } } System.out.println("Max offset of partition " + offsetsOfPartition.getKey() + " is " + offsetsOfPartition.getValue().get(offsetsOfPartition.getValue().size() - 1)); } final Properties props = new Properties(); props.put(ConsumerConfig.CLIENT_ID_CONFIG, "verifier"); 
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafka); props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class); props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class); props.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, IsolationLevel.READ_COMMITTED.toString()); try (final KafkaConsumer<byte[], byte[]> consumer = new KafkaConsumer<>(props)) { final List<TopicPartition> partitions = getAllPartitions(consumer, "data"); System.out.println("Partitions: " + partitions); System.out.flush(); consumer.assign(partitions); consumer.seekToEnd(partitions); for (final TopicPartition tp : partitions) { System.out.println("End-offset for " + tp + " is " + consumer.position(tp)); System.out.flush(); } } System.out.flush(); } finally { TERMINATED.countDown(); } } public static void verify(final String kafka, final boolean withRepartitioning, final String groupProtocol) { final Properties props = new Properties(); props.put(ConsumerConfig.CLIENT_ID_CONFIG, "verifier"); props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafka); props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class); props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class); props.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, IsolationLevel.READ_COMMITTED.toString()); try (final KafkaConsumer<byte[], byte[]> consumer = new KafkaConsumer<>(props)) { verifyAllTransactionFinished(consumer, kafka, withRepartitioning); } catch (final Exception e) { e.printStackTrace(System.err); System.out.println("FAILED"); return; } final Map<TopicPartition, Long> committedOffsets; try (final Admin adminClient = Admin.create(props)) { ensureStreamsApplicationDown(adminClient, groupProtocol); committedOffsets = getCommittedOffsets(adminClient, withRepartitioning); } final String[] allInputTopics; final String[] allOutputTopics; if (withRepartitioning) { allInputTopics = new String[] {"data", "repartition"}; allOutputTopics 
= new String[] {"echo", "min", "sum", "repartition", "max", "cnt"}; } else { allInputTopics = new String[] {"data"}; allOutputTopics = new String[] {"echo", "min", "sum"}; } final Map<String, Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>>> inputRecordsPerTopicPerPartition; try (final KafkaConsumer<byte[], byte[]> consumer = new KafkaConsumer<>(props)) { final List<TopicPartition> partitions = getAllPartitions(consumer, allInputTopics); consumer.assign(partitions); consumer.seekToBeginning(partitions); inputRecordsPerTopicPerPartition = getRecords(consumer, committedOffsets, withRepartitioning, true); } catch (final Exception e) { e.printStackTrace(System.err); System.out.println("FAILED"); return; } final Map<String, Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>>> outputRecordsPerTopicPerPartition; try (final KafkaConsumer<byte[], byte[]> consumer = new KafkaConsumer<>(props)) { final List<TopicPartition> partitions = getAllPartitions(consumer, allOutputTopics); consumer.assign(partitions); consumer.seekToBeginning(partitions); outputRecordsPerTopicPerPartition = getRecords(consumer, consumer.endOffsets(partitions), withRepartitioning, false); } catch (final Exception e) { e.printStackTrace(System.err); System.out.println("FAILED"); return; } verifyReceivedAllRecords(inputRecordsPerTopicPerPartition.get("data"), outputRecordsPerTopicPerPartition.get("echo")); if (withRepartitioning) { verifyReceivedAllRecords(inputRecordsPerTopicPerPartition.get("data"), outputRecordsPerTopicPerPartition.get("repartition")); } verifyMin(inputRecordsPerTopicPerPartition.get("data"), outputRecordsPerTopicPerPartition.get("min")); verifySum(inputRecordsPerTopicPerPartition.get("data"), outputRecordsPerTopicPerPartition.get("sum")); if (withRepartitioning) { verifyMax(inputRecordsPerTopicPerPartition.get("repartition"), outputRecordsPerTopicPerPartition.get("max")); verifyCnt(inputRecordsPerTopicPerPartition.get("repartition"), 
outputRecordsPerTopicPerPartition.get("cnt")); } // do not modify: required test output System.out.println("ALL-RECORDS-DELIVERED"); System.out.flush(); } private static void ensureStreamsApplicationDown(final Admin adminClient, final String groupProtocol) { final long maxWaitTime = System.currentTimeMillis() + MAX_IDLE_TIME_MS; boolean isEmpty; do { if (Objects.equals(groupProtocol, "streams")) { final StreamsGroupDescription description = getStreamsGroupDescription(adminClient); isEmpty = description.members().isEmpty(); if (System.currentTimeMillis() > maxWaitTime && !isEmpty) { throwNotDownException(description); } } else { final ConsumerGroupDescription description = getConsumerGroupDescription(adminClient); isEmpty = description.members().isEmpty(); if (System.currentTimeMillis() > maxWaitTime && !isEmpty) { throwNotDownException(description); } } sleep(1000L); } while (!isEmpty); } private static void throwNotDownException(final Object description) { throw new RuntimeException( "Streams application not down after " + MAX_IDLE_TIME_MS / 1000L + " seconds. 
" + "Group: " + description ); } private static Map<TopicPartition, Long> getCommittedOffsets(final Admin adminClient, final boolean withRepartitioning) { final Map<TopicPartition, OffsetAndMetadata> topicPartitionOffsetAndMetadataMap; try { final ListConsumerGroupOffsetsResult listConsumerGroupOffsetsResult = adminClient.listConsumerGroupOffsets(EosTestClient.APP_ID); topicPartitionOffsetAndMetadataMap = listConsumerGroupOffsetsResult.partitionsToOffsetAndMetadata().get(10, TimeUnit.SECONDS); } catch (final Exception e) { e.printStackTrace(); throw new RuntimeException(e); } final Map<TopicPartition, Long> committedOffsets = new HashMap<>(); for (final Map.Entry<TopicPartition, OffsetAndMetadata> entry : topicPartitionOffsetAndMetadataMap.entrySet()) { final String topic = entry.getKey().topic(); if (topic.equals("data") || withRepartitioning && topic.equals("repartition")) { committedOffsets.put(entry.getKey(), entry.getValue().offset()); } } return committedOffsets; } private static Map<String, Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>>> getRecords(final KafkaConsumer<byte[], byte[]> consumer, final Map<TopicPartition, Long> readEndOffsets, final boolean withRepartitioning, final boolean isInputTopic) { System.out.println("read end offset: " + readEndOffsets); final Map<String, Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>>> recordPerTopicPerPartition = new HashMap<>(); final Map<TopicPartition, Long> maxReceivedOffsetPerPartition = new HashMap<>(); final Map<TopicPartition, Long> maxConsumerPositionPerPartition = new HashMap<>(); long maxWaitTime = System.currentTimeMillis() + MAX_IDLE_TIME_MS; boolean allRecordsReceived = false; while (!allRecordsReceived && System.currentTimeMillis() < maxWaitTime) { final ConsumerRecords<byte[], byte[]> receivedRecords = consumer.poll(Duration.ofSeconds(1L)); for (final ConsumerRecord<byte[], byte[]> record : receivedRecords) { maxWaitTime = System.currentTimeMillis() + MAX_IDLE_TIME_MS; final 
TopicPartition tp = new TopicPartition(record.topic(), record.partition()); maxReceivedOffsetPerPartition.put(tp, record.offset()); final long readEndOffset = readEndOffsets.get(tp); if (record.offset() < readEndOffset) { addRecord(record, recordPerTopicPerPartition, withRepartitioning); } else if (!isInputTopic) { throw new RuntimeException("FAIL: did receive more records than expected for " + tp + " (expected EOL offset: " + readEndOffset + "; current offset: " + record.offset()); } } for (final TopicPartition tp : readEndOffsets.keySet()) { maxConsumerPositionPerPartition.put(tp, consumer.position(tp)); if (consumer.position(tp) >= readEndOffsets.get(tp)) { consumer.pause(Collections.singletonList(tp)); } } allRecordsReceived = consumer.paused().size() == readEndOffsets.keySet().size(); } if (!allRecordsReceived) { System.err.println("Pause partitions (ie, received all data): " + consumer.paused()); System.err.println("Max received offset per partition: " + maxReceivedOffsetPerPartition); System.err.println("Max consumer position per partition: " + maxConsumerPositionPerPartition); throw new RuntimeException("FAIL: did not receive all records after " + (MAX_IDLE_TIME_MS / 1000L) + " sec idle time."); } return recordPerTopicPerPartition; } private static void addRecord(final ConsumerRecord<byte[], byte[]> record, final Map<String, Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>>> recordPerTopicPerPartition, final boolean withRepartitioning) { final String topic = record.topic(); final TopicPartition partition = new TopicPartition(topic, record.partition()); if (verifyTopic(topic, withRepartitioning)) { final Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> topicRecordsPerPartition = recordPerTopicPerPartition.computeIfAbsent(topic, k -> new HashMap<>()); final List<ConsumerRecord<byte[], byte[]>> records = topicRecordsPerPartition.computeIfAbsent(partition, k -> new ArrayList<>()); records.add(record); } else { throw new RuntimeException("FAIL: 
received data from unexpected topic: " + record); } } private static boolean verifyTopic(final String topic, final boolean withRepartitioning) { final boolean validTopic = "data".equals(topic) || "echo".equals(topic) || "min".equals(topic) || "sum".equals(topic); if (withRepartitioning) { return validTopic || "repartition".equals(topic) || "max".equals(topic) || "cnt".equals(topic); } return validTopic; } private static void verifyReceivedAllRecords(final Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> expectedRecords, final Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> receivedRecords) { if (expectedRecords.size() != receivedRecords.size()) { throw new RuntimeException("Result verification failed. Received " + receivedRecords.size() + " records but expected " + expectedRecords.size()); } final StringDeserializer stringDeserializer = new StringDeserializer(); final IntegerDeserializer integerDeserializer = new IntegerDeserializer(); for (final Map.Entry<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> partitionRecords : receivedRecords.entrySet()) { final TopicPartition inputTopicPartition = new TopicPartition("data", partitionRecords.getKey().partition()); final List<ConsumerRecord<byte[], byte[]>> receivedRecordsForPartition = partitionRecords.getValue(); final List<ConsumerRecord<byte[], byte[]>> expectedRecordsForPartition = expectedRecords.get(inputTopicPartition); System.out.println(partitionRecords.getKey() + " with " + receivedRecordsForPartition.size() + ", " + inputTopicPartition + " with " + expectedRecordsForPartition.size()); final Iterator<ConsumerRecord<byte[], byte[]>> expectedRecord = expectedRecordsForPartition.iterator(); RuntimeException exception = null; for (final ConsumerRecord<byte[], byte[]> receivedRecord : receivedRecordsForPartition) { if (!expectedRecord.hasNext()) { exception = new RuntimeException("Result verification failed for " + receivedRecord + " since there's no more expected record"); } final 
ConsumerRecord<byte[], byte[]> expected = expectedRecord.next(); final String receivedKey = stringDeserializer.deserialize(receivedRecord.topic(), receivedRecord.key()); final int receivedValue = integerDeserializer.deserialize(receivedRecord.topic(), receivedRecord.value()); final String expectedKey = stringDeserializer.deserialize(expected.topic(), expected.key()); final int expectedValue = integerDeserializer.deserialize(expected.topic(), expected.value()); if (!receivedKey.equals(expectedKey) || receivedValue != expectedValue) { exception = new RuntimeException("Result verification failed for " + receivedRecord + " expected <" + expectedKey + "," + expectedValue + "> but was <" + receivedKey + "," + receivedValue + ">"); } } if (exception != null) { throw exception; } } } private static void verifyMin(final Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> inputPerTopicPerPartition, final Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> minPerTopicPerPartition) { final StringDeserializer stringDeserializer = new StringDeserializer(); final IntegerDeserializer integerDeserializer = new IntegerDeserializer(); final HashMap<String, Integer> currentMinPerKey = new HashMap<>(); for (final Map.Entry<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> partitionRecords : minPerTopicPerPartition.entrySet()) { final TopicPartition inputTopicPartition = new TopicPartition("data", partitionRecords.getKey().partition()); final List<ConsumerRecord<byte[], byte[]>> partitionInput = inputPerTopicPerPartition.get(inputTopicPartition); final List<ConsumerRecord<byte[], byte[]>> partitionMin = partitionRecords.getValue(); if (partitionInput.size() != partitionMin.size()) { throw new RuntimeException("Result verification failed: expected " + partitionInput.size() + " records for " + partitionRecords.getKey() + " but received " + partitionMin.size()); } final Iterator<ConsumerRecord<byte[], byte[]>> inputRecords = partitionInput.iterator(); for (final 
ConsumerRecord<byte[], byte[]> receivedRecord : partitionMin) { final ConsumerRecord<byte[], byte[]> input = inputRecords.next(); final String receivedKey = stringDeserializer.deserialize(receivedRecord.topic(), receivedRecord.key()); final int receivedValue = integerDeserializer.deserialize(receivedRecord.topic(), receivedRecord.value()); final String key = stringDeserializer.deserialize(input.topic(), input.key()); final int value = integerDeserializer.deserialize(input.topic(), input.value()); Integer min = currentMinPerKey.get(key); if (min == null) { min = value; } else { min = Math.min(min, value); } currentMinPerKey.put(key, min); if (!receivedKey.equals(key) || receivedValue != min) { throw new RuntimeException("Result verification failed for " + receivedRecord + " expected <" + key + "," + min + "> but was <" + receivedKey + "," + receivedValue + ">"); } } } } private static void verifySum(final Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> inputPerTopicPerPartition, final Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> minPerTopicPerPartition) { final StringDeserializer stringDeserializer = new StringDeserializer(); final IntegerDeserializer integerDeserializer = new IntegerDeserializer(); final LongDeserializer longDeserializer = new LongDeserializer(); final HashMap<String, Long> currentSumPerKey = new HashMap<>(); for (final Map.Entry<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> partitionRecords : minPerTopicPerPartition.entrySet()) { final TopicPartition inputTopicPartition = new TopicPartition("data", partitionRecords.getKey().partition()); final List<ConsumerRecord<byte[], byte[]>> partitionInput = inputPerTopicPerPartition.get(inputTopicPartition); final List<ConsumerRecord<byte[], byte[]>> partitionSum = partitionRecords.getValue(); if (partitionInput.size() != partitionSum.size()) { throw new RuntimeException("Result verification failed: expected " + partitionInput.size() + " records for " + 
partitionRecords.getKey() + " but received " + partitionSum.size()); } final Iterator<ConsumerRecord<byte[], byte[]>> inputRecords = partitionInput.iterator(); for (final ConsumerRecord<byte[], byte[]> receivedRecord : partitionSum) { final ConsumerRecord<byte[], byte[]> input = inputRecords.next(); final String receivedKey = stringDeserializer.deserialize(receivedRecord.topic(), receivedRecord.key()); final long receivedValue = longDeserializer.deserialize(receivedRecord.topic(), receivedRecord.value()); final String key = stringDeserializer.deserialize(input.topic(), input.key()); final int value = integerDeserializer.deserialize(input.topic(), input.value()); Long sum = currentSumPerKey.get(key); if (sum == null) { sum = (long) value; } else { sum += value; } currentSumPerKey.put(key, sum); if (!receivedKey.equals(key) || receivedValue != sum) { throw new RuntimeException("Result verification failed for " + receivedRecord + " expected <" + key + "," + sum + "> but was <" + receivedKey + "," + receivedValue + ">"); } } } } private static void verifyMax(final Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> inputPerTopicPerPartition, final Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> maxPerTopicPerPartition) { final StringDeserializer stringDeserializer = new StringDeserializer(); final IntegerDeserializer integerDeserializer = new IntegerDeserializer(); final HashMap<String, Integer> currentMinPerKey = new HashMap<>(); for (final Map.Entry<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> partitionRecords : maxPerTopicPerPartition.entrySet()) { final TopicPartition inputTopicPartition = new TopicPartition("repartition", partitionRecords.getKey().partition()); final List<ConsumerRecord<byte[], byte[]>> partitionInput = inputPerTopicPerPartition.get(inputTopicPartition); final List<ConsumerRecord<byte[], byte[]>> partitionMax = partitionRecords.getValue(); if (partitionInput.size() != partitionMax.size()) { throw new 
RuntimeException("Result verification failed: expected " + partitionInput.size() + " records for " + partitionRecords.getKey() + " but received " + partitionMax.size()); } final Iterator<ConsumerRecord<byte[], byte[]>> inputRecords = partitionInput.iterator(); for (final ConsumerRecord<byte[], byte[]> receivedRecord : partitionMax) { final ConsumerRecord<byte[], byte[]> input = inputRecords.next(); final String receivedKey = stringDeserializer.deserialize(receivedRecord.topic(), receivedRecord.key()); final int receivedValue = integerDeserializer.deserialize(receivedRecord.topic(), receivedRecord.value()); final String key = stringDeserializer.deserialize(input.topic(), input.key()); final int value = integerDeserializer.deserialize(input.topic(), input.value()); Integer max = currentMinPerKey.get(key); if (max == null) { max = Integer.MIN_VALUE; } max = Math.max(max, value); currentMinPerKey.put(key, max); if (!receivedKey.equals(key) || receivedValue != max) { throw new RuntimeException("Result verification failed for " + receivedRecord + " expected <" + key + "," + max + "> but was <" + receivedKey + "," + receivedValue + ">"); } } } } private static void verifyCnt(final Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> inputPerTopicPerPartition, final Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> cntPerTopicPerPartition) { final StringDeserializer stringDeserializer = new StringDeserializer(); final LongDeserializer longDeserializer = new LongDeserializer(); final HashMap<String, Long> currentSumPerKey = new HashMap<>(); for (final Map.Entry<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> partitionRecords : cntPerTopicPerPartition.entrySet()) { final TopicPartition inputTopicPartition = new TopicPartition("repartition", partitionRecords.getKey().partition()); final List<ConsumerRecord<byte[], byte[]>> partitionInput = inputPerTopicPerPartition.get(inputTopicPartition); final List<ConsumerRecord<byte[], byte[]>> partitionCnt = 
partitionRecords.getValue(); if (partitionInput.size() != partitionCnt.size()) { throw new RuntimeException("Result verification failed: expected " + partitionInput.size() + " records for " + partitionRecords.getKey() + " but received " + partitionCnt.size()); } final Iterator<ConsumerRecord<byte[], byte[]>> inputRecords = partitionInput.iterator(); for (final ConsumerRecord<byte[], byte[]> receivedRecord : partitionCnt) { final ConsumerRecord<byte[], byte[]> input = inputRecords.next(); final String receivedKey = stringDeserializer.deserialize(receivedRecord.topic(), receivedRecord.key()); final long receivedValue = longDeserializer.deserialize(receivedRecord.topic(), receivedRecord.value()); final String key = stringDeserializer.deserialize(input.topic(), input.key()); Long cnt = currentSumPerKey.get(key); if (cnt == null) { cnt = 0L; } currentSumPerKey.put(key, ++cnt); if (!receivedKey.equals(key) || receivedValue != cnt) { throw new RuntimeException("Result verification failed for " + receivedRecord + " expected <" + key + "," + cnt + "> but was <" + receivedKey + "," + receivedValue + ">"); } } } } private static void verifyAllTransactionFinished(final KafkaConsumer<byte[], byte[]> consumer, final String kafka, final boolean withRepartitioning) { final String[] topics; if (withRepartitioning) { topics = new String[] {"echo", "min", "sum", "repartition", "max", "cnt"}; } else { topics = new String[] {"echo", "min", "sum"}; } final List<TopicPartition> partitions = getAllPartitions(consumer, topics); consumer.assign(partitions); consumer.seekToEnd(partitions); for (final TopicPartition tp : partitions) { System.out.println(tp + " at position " + consumer.position(tp)); } final Properties consumerProps = new Properties(); consumerProps.put(ConsumerConfig.CLIENT_ID_CONFIG, "consumer-uncommitted"); consumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafka); consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class); 
consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class); final long maxWaitTime = System.currentTimeMillis() + MAX_IDLE_TIME_MS; try (final KafkaConsumer<byte[], byte[]> consumerUncommitted = new KafkaConsumer<>(consumerProps)) { while (!partitions.isEmpty() && System.currentTimeMillis() < maxWaitTime) { consumer.seekToEnd(partitions); final Map<TopicPartition, Long> topicEndOffsets = consumerUncommitted.endOffsets(partitions); final Iterator<TopicPartition> iterator = partitions.iterator(); while (iterator.hasNext()) { final TopicPartition topicPartition = iterator.next(); final long position = consumer.position(topicPartition); if (position == topicEndOffsets.get(topicPartition)) { iterator.remove(); System.out.println("Removing " + topicPartition + " at position " + position); } else if (consumer.position(topicPartition) > topicEndOffsets.get(topicPartition)) { throw new IllegalStateException("Offset for partition " + topicPartition + " is larger than topic endOffset: " + position + " > " + topicEndOffsets.get(topicPartition)); } else { System.out.println("Retry " + topicPartition + " at position " + position); } } sleep(1000L); } } if (!partitions.isEmpty()) { throw new RuntimeException("Could not read all verification records. Did not receive any new record within the last " + (MAX_IDLE_TIME_MS / 1000L) + " sec."); } } private static List<TopicPartition> getAllPartitions(final KafkaConsumer<?, ?> consumer, final String... 
topics) { final ArrayList<TopicPartition> partitions = new ArrayList<>(); for (final String topic : topics) { for (final PartitionInfo info : consumer.partitionsFor(topic)) { partitions.add(new TopicPartition(info.topic(), info.partition())); } } return partitions; } private static ConsumerGroupDescription getConsumerGroupDescription(final Admin adminClient) { final ConsumerGroupDescription description; try { description = adminClient.describeConsumerGroups(Collections.singleton(EosTestClient.APP_ID)) .describedGroups() .get(EosTestClient.APP_ID) .get(10, TimeUnit.SECONDS); } catch (final InterruptedException | ExecutionException | java.util.concurrent.TimeoutException e) { e.printStackTrace(); throw new RuntimeException("Unexpected Exception getting group description", e); } return description; } private static StreamsGroupDescription getStreamsGroupDescription(final Admin adminClient) { final StreamsGroupDescription description; try { description = adminClient.describeStreamsGroups(Collections.singleton(EosTestClient.APP_ID)) .describedGroups() .get(EosTestClient.APP_ID) .get(10, TimeUnit.SECONDS); } catch (final InterruptedException | ExecutionException | java.util.concurrent.TimeoutException e) { e.printStackTrace(); throw new RuntimeException("Unexpected Exception getting group description", e); } return description; } }
googleapis/google-cloud-java
36,773
java-contentwarehouse/proto-google-cloud-contentwarehouse-v1/src/main/java/com/google/cloud/contentwarehouse/v1/CustomWeightsMetadata.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/contentwarehouse/v1/filters.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.contentwarehouse.v1; /** * * * <pre> * To support the custom weighting across document schemas. * </pre> * * Protobuf type {@code google.cloud.contentwarehouse.v1.CustomWeightsMetadata} */ public final class CustomWeightsMetadata extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.contentwarehouse.v1.CustomWeightsMetadata) CustomWeightsMetadataOrBuilder { private static final long serialVersionUID = 0L; // Use CustomWeightsMetadata.newBuilder() to construct. 
private CustomWeightsMetadata(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CustomWeightsMetadata() { weightedSchemaProperties_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CustomWeightsMetadata(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.contentwarehouse.v1.FiltersProto .internal_static_google_cloud_contentwarehouse_v1_CustomWeightsMetadata_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.contentwarehouse.v1.FiltersProto .internal_static_google_cloud_contentwarehouse_v1_CustomWeightsMetadata_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata.class, com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata.Builder.class); } public static final int WEIGHTED_SCHEMA_PROPERTIES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.contentwarehouse.v1.WeightedSchemaProperty> weightedSchemaProperties_; /** * * * <pre> * List of schema and property name. Allows a maximum of 10 schemas to be * specified for relevance boosting. * </pre> * * <code> * repeated .google.cloud.contentwarehouse.v1.WeightedSchemaProperty weighted_schema_properties = 1; * </code> */ @java.lang.Override public java.util.List<com.google.cloud.contentwarehouse.v1.WeightedSchemaProperty> getWeightedSchemaPropertiesList() { return weightedSchemaProperties_; } /** * * * <pre> * List of schema and property name. Allows a maximum of 10 schemas to be * specified for relevance boosting. * </pre> * * <code> * repeated .google.cloud.contentwarehouse.v1.WeightedSchemaProperty weighted_schema_properties = 1; * </code> */ @java.lang.Override public java.util.List< ? 
extends com.google.cloud.contentwarehouse.v1.WeightedSchemaPropertyOrBuilder> getWeightedSchemaPropertiesOrBuilderList() { return weightedSchemaProperties_; } /** * * * <pre> * List of schema and property name. Allows a maximum of 10 schemas to be * specified for relevance boosting. * </pre> * * <code> * repeated .google.cloud.contentwarehouse.v1.WeightedSchemaProperty weighted_schema_properties = 1; * </code> */ @java.lang.Override public int getWeightedSchemaPropertiesCount() { return weightedSchemaProperties_.size(); } /** * * * <pre> * List of schema and property name. Allows a maximum of 10 schemas to be * specified for relevance boosting. * </pre> * * <code> * repeated .google.cloud.contentwarehouse.v1.WeightedSchemaProperty weighted_schema_properties = 1; * </code> */ @java.lang.Override public com.google.cloud.contentwarehouse.v1.WeightedSchemaProperty getWeightedSchemaProperties( int index) { return weightedSchemaProperties_.get(index); } /** * * * <pre> * List of schema and property name. Allows a maximum of 10 schemas to be * specified for relevance boosting. 
* </pre> * * <code> * repeated .google.cloud.contentwarehouse.v1.WeightedSchemaProperty weighted_schema_properties = 1; * </code> */ @java.lang.Override public com.google.cloud.contentwarehouse.v1.WeightedSchemaPropertyOrBuilder getWeightedSchemaPropertiesOrBuilder(int index) { return weightedSchemaProperties_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < weightedSchemaProperties_.size(); i++) { output.writeMessage(1, weightedSchemaProperties_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < weightedSchemaProperties_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 1, weightedSchemaProperties_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata)) { return super.equals(obj); } com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata other = (com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata) obj; if (!getWeightedSchemaPropertiesList().equals(other.getWeightedSchemaPropertiesList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getWeightedSchemaPropertiesCount() > 0) { hash = (37 * hash) + 
WEIGHTED_SCHEMA_PROPERTIES_FIELD_NUMBER; hash = (53 * hash) + getWeightedSchemaPropertiesList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata parseFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * To support the custom weighting across document schemas. * </pre> * * Protobuf type {@code google.cloud.contentwarehouse.v1.CustomWeightsMetadata} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.contentwarehouse.v1.CustomWeightsMetadata) com.google.cloud.contentwarehouse.v1.CustomWeightsMetadataOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.contentwarehouse.v1.FiltersProto .internal_static_google_cloud_contentwarehouse_v1_CustomWeightsMetadata_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.contentwarehouse.v1.FiltersProto .internal_static_google_cloud_contentwarehouse_v1_CustomWeightsMetadata_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata.class, com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata.Builder.class); } // Construct using com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (weightedSchemaPropertiesBuilder_ == null) { weightedSchemaProperties_ = java.util.Collections.emptyList(); } else { weightedSchemaProperties_ = null; weightedSchemaPropertiesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
com.google.cloud.contentwarehouse.v1.FiltersProto .internal_static_google_cloud_contentwarehouse_v1_CustomWeightsMetadata_descriptor; } @java.lang.Override public com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata getDefaultInstanceForType() { return com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata.getDefaultInstance(); } @java.lang.Override public com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata build() { com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata buildPartial() { com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata result = new com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata result) { if (weightedSchemaPropertiesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { weightedSchemaProperties_ = java.util.Collections.unmodifiableList(weightedSchemaProperties_); bitField0_ = (bitField0_ & ~0x00000001); } result.weightedSchemaProperties_ = weightedSchemaProperties_; } else { result.weightedSchemaProperties_ = weightedSchemaPropertiesBuilder_.build(); } } private void buildPartial0(com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata result) { int from_bitField0_ = bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder 
clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata) { return mergeFrom((com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata other) { if (other == com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata.getDefaultInstance()) return this; if (weightedSchemaPropertiesBuilder_ == null) { if (!other.weightedSchemaProperties_.isEmpty()) { if (weightedSchemaProperties_.isEmpty()) { weightedSchemaProperties_ = other.weightedSchemaProperties_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureWeightedSchemaPropertiesIsMutable(); weightedSchemaProperties_.addAll(other.weightedSchemaProperties_); } onChanged(); } } else { if (!other.weightedSchemaProperties_.isEmpty()) { if (weightedSchemaPropertiesBuilder_.isEmpty()) { weightedSchemaPropertiesBuilder_.dispose(); weightedSchemaPropertiesBuilder_ = null; weightedSchemaProperties_ = other.weightedSchemaProperties_; bitField0_ = (bitField0_ & ~0x00000001); weightedSchemaPropertiesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getWeightedSchemaPropertiesFieldBuilder() : null; } else { weightedSchemaPropertiesBuilder_.addAllMessages(other.weightedSchemaProperties_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.contentwarehouse.v1.WeightedSchemaProperty m = input.readMessage( com.google.cloud.contentwarehouse.v1.WeightedSchemaProperty.parser(), extensionRegistry); if (weightedSchemaPropertiesBuilder_ == null) { ensureWeightedSchemaPropertiesIsMutable(); weightedSchemaProperties_.add(m); } else { weightedSchemaPropertiesBuilder_.addMessage(m); } break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.contentwarehouse.v1.WeightedSchemaProperty> weightedSchemaProperties_ = java.util.Collections.emptyList(); private void ensureWeightedSchemaPropertiesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { weightedSchemaProperties_ = new java.util.ArrayList<com.google.cloud.contentwarehouse.v1.WeightedSchemaProperty>( weightedSchemaProperties_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.contentwarehouse.v1.WeightedSchemaProperty, 
com.google.cloud.contentwarehouse.v1.WeightedSchemaProperty.Builder, com.google.cloud.contentwarehouse.v1.WeightedSchemaPropertyOrBuilder> weightedSchemaPropertiesBuilder_; /** * * * <pre> * List of schema and property name. Allows a maximum of 10 schemas to be * specified for relevance boosting. * </pre> * * <code> * repeated .google.cloud.contentwarehouse.v1.WeightedSchemaProperty weighted_schema_properties = 1; * </code> */ public java.util.List<com.google.cloud.contentwarehouse.v1.WeightedSchemaProperty> getWeightedSchemaPropertiesList() { if (weightedSchemaPropertiesBuilder_ == null) { return java.util.Collections.unmodifiableList(weightedSchemaProperties_); } else { return weightedSchemaPropertiesBuilder_.getMessageList(); } } /** * * * <pre> * List of schema and property name. Allows a maximum of 10 schemas to be * specified for relevance boosting. * </pre> * * <code> * repeated .google.cloud.contentwarehouse.v1.WeightedSchemaProperty weighted_schema_properties = 1; * </code> */ public int getWeightedSchemaPropertiesCount() { if (weightedSchemaPropertiesBuilder_ == null) { return weightedSchemaProperties_.size(); } else { return weightedSchemaPropertiesBuilder_.getCount(); } } /** * * * <pre> * List of schema and property name. Allows a maximum of 10 schemas to be * specified for relevance boosting. * </pre> * * <code> * repeated .google.cloud.contentwarehouse.v1.WeightedSchemaProperty weighted_schema_properties = 1; * </code> */ public com.google.cloud.contentwarehouse.v1.WeightedSchemaProperty getWeightedSchemaProperties( int index) { if (weightedSchemaPropertiesBuilder_ == null) { return weightedSchemaProperties_.get(index); } else { return weightedSchemaPropertiesBuilder_.getMessage(index); } } /** * * * <pre> * List of schema and property name. Allows a maximum of 10 schemas to be * specified for relevance boosting. 
* </pre> * * <code> * repeated .google.cloud.contentwarehouse.v1.WeightedSchemaProperty weighted_schema_properties = 1; * </code> */ public Builder setWeightedSchemaProperties( int index, com.google.cloud.contentwarehouse.v1.WeightedSchemaProperty value) { if (weightedSchemaPropertiesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureWeightedSchemaPropertiesIsMutable(); weightedSchemaProperties_.set(index, value); onChanged(); } else { weightedSchemaPropertiesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * List of schema and property name. Allows a maximum of 10 schemas to be * specified for relevance boosting. * </pre> * * <code> * repeated .google.cloud.contentwarehouse.v1.WeightedSchemaProperty weighted_schema_properties = 1; * </code> */ public Builder setWeightedSchemaProperties( int index, com.google.cloud.contentwarehouse.v1.WeightedSchemaProperty.Builder builderForValue) { if (weightedSchemaPropertiesBuilder_ == null) { ensureWeightedSchemaPropertiesIsMutable(); weightedSchemaProperties_.set(index, builderForValue.build()); onChanged(); } else { weightedSchemaPropertiesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * List of schema and property name. Allows a maximum of 10 schemas to be * specified for relevance boosting. * </pre> * * <code> * repeated .google.cloud.contentwarehouse.v1.WeightedSchemaProperty weighted_schema_properties = 1; * </code> */ public Builder addWeightedSchemaProperties( com.google.cloud.contentwarehouse.v1.WeightedSchemaProperty value) { if (weightedSchemaPropertiesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureWeightedSchemaPropertiesIsMutable(); weightedSchemaProperties_.add(value); onChanged(); } else { weightedSchemaPropertiesBuilder_.addMessage(value); } return this; } /** * * * <pre> * List of schema and property name. Allows a maximum of 10 schemas to be * specified for relevance boosting. 
* </pre> * * <code> * repeated .google.cloud.contentwarehouse.v1.WeightedSchemaProperty weighted_schema_properties = 1; * </code> */ public Builder addWeightedSchemaProperties( int index, com.google.cloud.contentwarehouse.v1.WeightedSchemaProperty value) { if (weightedSchemaPropertiesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureWeightedSchemaPropertiesIsMutable(); weightedSchemaProperties_.add(index, value); onChanged(); } else { weightedSchemaPropertiesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * List of schema and property name. Allows a maximum of 10 schemas to be * specified for relevance boosting. * </pre> * * <code> * repeated .google.cloud.contentwarehouse.v1.WeightedSchemaProperty weighted_schema_properties = 1; * </code> */ public Builder addWeightedSchemaProperties( com.google.cloud.contentwarehouse.v1.WeightedSchemaProperty.Builder builderForValue) { if (weightedSchemaPropertiesBuilder_ == null) { ensureWeightedSchemaPropertiesIsMutable(); weightedSchemaProperties_.add(builderForValue.build()); onChanged(); } else { weightedSchemaPropertiesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * List of schema and property name. Allows a maximum of 10 schemas to be * specified for relevance boosting. * </pre> * * <code> * repeated .google.cloud.contentwarehouse.v1.WeightedSchemaProperty weighted_schema_properties = 1; * </code> */ public Builder addWeightedSchemaProperties( int index, com.google.cloud.contentwarehouse.v1.WeightedSchemaProperty.Builder builderForValue) { if (weightedSchemaPropertiesBuilder_ == null) { ensureWeightedSchemaPropertiesIsMutable(); weightedSchemaProperties_.add(index, builderForValue.build()); onChanged(); } else { weightedSchemaPropertiesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * List of schema and property name. Allows a maximum of 10 schemas to be * specified for relevance boosting. 
* </pre> * * <code> * repeated .google.cloud.contentwarehouse.v1.WeightedSchemaProperty weighted_schema_properties = 1; * </code> */ public Builder addAllWeightedSchemaProperties( java.lang.Iterable<? extends com.google.cloud.contentwarehouse.v1.WeightedSchemaProperty> values) { if (weightedSchemaPropertiesBuilder_ == null) { ensureWeightedSchemaPropertiesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, weightedSchemaProperties_); onChanged(); } else { weightedSchemaPropertiesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * List of schema and property name. Allows a maximum of 10 schemas to be * specified for relevance boosting. * </pre> * * <code> * repeated .google.cloud.contentwarehouse.v1.WeightedSchemaProperty weighted_schema_properties = 1; * </code> */ public Builder clearWeightedSchemaProperties() { if (weightedSchemaPropertiesBuilder_ == null) { weightedSchemaProperties_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { weightedSchemaPropertiesBuilder_.clear(); } return this; } /** * * * <pre> * List of schema and property name. Allows a maximum of 10 schemas to be * specified for relevance boosting. * </pre> * * <code> * repeated .google.cloud.contentwarehouse.v1.WeightedSchemaProperty weighted_schema_properties = 1; * </code> */ public Builder removeWeightedSchemaProperties(int index) { if (weightedSchemaPropertiesBuilder_ == null) { ensureWeightedSchemaPropertiesIsMutable(); weightedSchemaProperties_.remove(index); onChanged(); } else { weightedSchemaPropertiesBuilder_.remove(index); } return this; } /** * * * <pre> * List of schema and property name. Allows a maximum of 10 schemas to be * specified for relevance boosting. 
* </pre> * * <code> * repeated .google.cloud.contentwarehouse.v1.WeightedSchemaProperty weighted_schema_properties = 1; * </code> */ public com.google.cloud.contentwarehouse.v1.WeightedSchemaProperty.Builder getWeightedSchemaPropertiesBuilder(int index) { return getWeightedSchemaPropertiesFieldBuilder().getBuilder(index); } /** * * * <pre> * List of schema and property name. Allows a maximum of 10 schemas to be * specified for relevance boosting. * </pre> * * <code> * repeated .google.cloud.contentwarehouse.v1.WeightedSchemaProperty weighted_schema_properties = 1; * </code> */ public com.google.cloud.contentwarehouse.v1.WeightedSchemaPropertyOrBuilder getWeightedSchemaPropertiesOrBuilder(int index) { if (weightedSchemaPropertiesBuilder_ == null) { return weightedSchemaProperties_.get(index); } else { return weightedSchemaPropertiesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * List of schema and property name. Allows a maximum of 10 schemas to be * specified for relevance boosting. * </pre> * * <code> * repeated .google.cloud.contentwarehouse.v1.WeightedSchemaProperty weighted_schema_properties = 1; * </code> */ public java.util.List< ? extends com.google.cloud.contentwarehouse.v1.WeightedSchemaPropertyOrBuilder> getWeightedSchemaPropertiesOrBuilderList() { if (weightedSchemaPropertiesBuilder_ != null) { return weightedSchemaPropertiesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(weightedSchemaProperties_); } } /** * * * <pre> * List of schema and property name. Allows a maximum of 10 schemas to be * specified for relevance boosting. 
* </pre> * * <code> * repeated .google.cloud.contentwarehouse.v1.WeightedSchemaProperty weighted_schema_properties = 1; * </code> */ public com.google.cloud.contentwarehouse.v1.WeightedSchemaProperty.Builder addWeightedSchemaPropertiesBuilder() { return getWeightedSchemaPropertiesFieldBuilder() .addBuilder( com.google.cloud.contentwarehouse.v1.WeightedSchemaProperty.getDefaultInstance()); } /** * * * <pre> * List of schema and property name. Allows a maximum of 10 schemas to be * specified for relevance boosting. * </pre> * * <code> * repeated .google.cloud.contentwarehouse.v1.WeightedSchemaProperty weighted_schema_properties = 1; * </code> */ public com.google.cloud.contentwarehouse.v1.WeightedSchemaProperty.Builder addWeightedSchemaPropertiesBuilder(int index) { return getWeightedSchemaPropertiesFieldBuilder() .addBuilder( index, com.google.cloud.contentwarehouse.v1.WeightedSchemaProperty.getDefaultInstance()); } /** * * * <pre> * List of schema and property name. Allows a maximum of 10 schemas to be * specified for relevance boosting. 
* </pre> * * <code> * repeated .google.cloud.contentwarehouse.v1.WeightedSchemaProperty weighted_schema_properties = 1; * </code> */ public java.util.List<com.google.cloud.contentwarehouse.v1.WeightedSchemaProperty.Builder> getWeightedSchemaPropertiesBuilderList() { return getWeightedSchemaPropertiesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.contentwarehouse.v1.WeightedSchemaProperty, com.google.cloud.contentwarehouse.v1.WeightedSchemaProperty.Builder, com.google.cloud.contentwarehouse.v1.WeightedSchemaPropertyOrBuilder> getWeightedSchemaPropertiesFieldBuilder() { if (weightedSchemaPropertiesBuilder_ == null) { weightedSchemaPropertiesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.contentwarehouse.v1.WeightedSchemaProperty, com.google.cloud.contentwarehouse.v1.WeightedSchemaProperty.Builder, com.google.cloud.contentwarehouse.v1.WeightedSchemaPropertyOrBuilder>( weightedSchemaProperties_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); weightedSchemaProperties_ = null; } return weightedSchemaPropertiesBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.contentwarehouse.v1.CustomWeightsMetadata) } // @@protoc_insertion_point(class_scope:google.cloud.contentwarehouse.v1.CustomWeightsMetadata) private static final com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata(); } public static com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata getDefaultInstance() { return DEFAULT_INSTANCE; } private 
// NOTE: machine-generated protobuf code (see "DO NOT EDIT" in the file header) — the
// comments below are review annotations only; regenerate from the .proto instead of
// hand-editing this logic.
// Singleton wire-format parser: delegates to Builder.mergeFrom so a partially parsed
// message can be attached to any failure via setUnfinishedMessage.
static final com.google.protobuf.Parser<CustomWeightsMetadata> PARSER =
    new com.google.protobuf.AbstractParser<CustomWeightsMetadata>() {
      @java.lang.Override
      public CustomWeightsMetadata parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Re-throw with whatever was parsed so far so callers can inspect it.
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          // Missing required fields: convert to the checked protobuf exception type.
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures, again preserving the partial message.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

  // Static accessor for the shared parser instance.
  public static com.google.protobuf.Parser<CustomWeightsMetadata> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<CustomWeightsMetadata> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.contentwarehouse.v1.CustomWeightsMetadata getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
apache/qpid-broker-j
36,814
broker-plugins/connection-limits/src/test/java/org/apache/qpid/server/user/connection/limits/config/RuleSetTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.qpid.server.user.connection.limits.config; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; import java.security.Principal; import java.time.Duration; import java.time.Instant; import java.util.Arrays; import java.util.Collections; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.regex.Pattern; import javax.security.auth.Subject; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.mockito.Mockito; import org.apache.qpid.server.logging.EventLogger; import org.apache.qpid.server.model.port.AmqpPort; import org.apache.qpid.server.security.auth.AuthenticatedPrincipal; import org.apache.qpid.server.security.auth.TestPrincipalUtils; import org.apache.qpid.server.security.limit.ConnectionLimitException; import org.apache.qpid.server.security.limit.ConnectionSlot; import org.apache.qpid.server.transport.AMQPConnection; import 
org.apache.qpid.server.user.connection.limits.config.RuleSet.Builder;
import org.apache.qpid.test.utils.UnitTestBase;

/** Unit tests for {@link RuleSet}: connection count limits, frequency limits and blocking rules. */
public class RuleSetTest extends UnitTestBase
{
    // Fixture identities and limiter configuration shared by all tests.
    private static final String TEST_USER = "user";
    private static final String OTHER_USER = "other";
    private static final String TEST_GROUP1 = "group1";
    private static final String TEST_GROUP2 = "group2";
    private static final String OTHER_GROUP = "anotherGroup";
    private static final String TEST_PORT = "amqp";
    private static final String LIMITER_NAME = "Limiter";

    private EventLogger _eventLogger;   // mocked logger handed to mock connections
    private AmqpPort<?> _port;          // mock port reporting the name TEST_PORT
    private Subject _subject;           // subject for TEST_USER, member of both test groups
    private Principal _principal;       // the AuthenticatedPrincipal extracted from _subject

    @BeforeEach
    public void setUp()
    {
        _eventLogger = Mockito.mock(EventLogger.class);
        _subject = TestPrincipalUtils.createTestSubject(TEST_USER, TEST_GROUP1, TEST_GROUP2);
        // Pick out the authenticated principal so tests can stub getAuthorizedPrincipal().
        for (Principal principal : _subject.getPrincipals())
        {
            if (principal instanceof AuthenticatedPrincipal)
            {
                _principal = principal;
            }
        }
        _port = Mockito.mock(AmqpPort.class);
        Mockito.doReturn(TEST_PORT).when(_port).getName();
    }

    /**
     * Two overlapping frequency rules for the same user (3 per 1s and 2 per 2s):
     * after two quick registrations, a third must be rejected until the longer
     * period has elapsed.
     */
    @Test
    public void testFrequencyLimit_multiplePeriods()
    {
        final Duration frequencyPeriod1 = Duration.ofSeconds(1L);
        final Duration frequencyPeriod2 = Duration.ofSeconds(2L);
        final Builder builder = RuleSet.newBuilder(LIMITER_NAME, Duration.ofMinutes(1L));
        builder.addRule(Rule.newNonBlockingRule(TEST_PORT, TEST_USER, null, 3, frequencyPeriod1));
        builder.addRule(Rule.newNonBlockingRule(TEST_PORT, TEST_USER, null, 2, frequencyPeriod2));
        builder.addRule(Rule.newBlockingRule(RulePredicates.ALL_PORTS, RulePredicates.ALL_USERS));
        final RuleSet ruleSet = builder.build();
        assertNotNull(ruleSet);
        final Instant registrationStart = Instant.now();
        try
        {
            // Two register/free cycles stay within both frequency budgets.
            ruleSet.register(newConnection()).free();
            ruleSet.register(newConnection()).free();
        }
        catch (ConnectionLimitException e)
        {
            fail("An exception is not expected here");
        }
        final Instant registrationEnd = Instant.now();
        Instant before = Instant.now();
        do
        {
            try
            {
                before = Instant.now();
ruleSet.register(newConnection()).free(); assertTrue(Duration.between(registrationStart, Instant.now()).compareTo(frequencyPeriod2) >= 0); break; } catch (ConnectionLimitException e) { assertTrue(Duration.between(registrationEnd, before).compareTo(frequencyPeriod2) <= 0); } } while (Duration.between(registrationEnd, Instant.now()).compareTo(Duration.ofSeconds(3L)) < 0); } @Test public void testGroupConnectionFrequencyLimit_Concurrency() { for (final Integer countLimit : Arrays.asList(57, 176, null)) { for (final int threadCount : new int[]{7, 17, 27}) { testGroupConnectionFrequencyLimit_Concurrency(countLimit, threadCount); } } } private void testGroupConnectionFrequencyLimit_Concurrency(Integer countLimit, int threadCount) { if (countLimit != null) { if (countLimit < 3) { countLimit = 3; } if (countLimit < threadCount) { countLimit = threadCount; } } final Duration frequencyPeriod = Duration.ofDays(3650L); final Builder builder = RuleSet.newBuilder(LIMITER_NAME, frequencyPeriod); builder.addRule(Rule.newNonBlockingRule(TEST_PORT, TEST_GROUP2, countLimit, 1000, frequencyPeriod)); builder.addRule(Rule.newNonBlockingRule(TEST_PORT, TEST_GROUP1, null, 2, frequencyPeriod)); builder.addRule(Rule.newNonBlockingRule(TEST_PORT, OTHER_USER, 1, 1, frequencyPeriod)); builder.addRule(Rule.newNonBlockingRule(RulePredicates.ALL_PORTS, OTHER_GROUP, 1, 1, frequencyPeriod)); builder.addRule(Rule.newBlockingRule(RulePredicates.ALL_PORTS, RulePredicates.ALL_USERS)); testParallelThreads(threadCount, builder.build(), true); } @Test public void testGroupConnectionCountLimit_Concurrency() { for (final Duration duration : Arrays.asList(Duration.ofDays(2L), null)) { for (final Integer frequencyLimit : Arrays.asList(200, 77, null)) { for (final int threadCount : new int[]{7, 11, 21}) { testGroupConnectionCountLimit_Concurrency(duration, frequencyLimit, threadCount); } } } } private void testGroupConnectionCountLimit_Concurrency(Duration frequencyPeriod, Integer frequencyLimit, int 
threadCount) { if (frequencyLimit != null) { if (frequencyLimit < 3) { frequencyLimit = 3; } if (frequencyLimit < threadCount) { frequencyLimit = threadCount; } } final Builder builder = RuleSet.newBuilder(LIMITER_NAME, frequencyPeriod); builder.addRule(Rule.newNonBlockingRule(TEST_PORT, TEST_GROUP2, 1000, frequencyLimit, frequencyPeriod)); builder.addRule(Rule.newNonBlockingRule(TEST_PORT, TEST_GROUP1, 2, null, frequencyPeriod)); builder.addRule(Rule.newNonBlockingRule(TEST_PORT, OTHER_USER, 1, 1, frequencyPeriod)); builder.addRule(Rule.newNonBlockingRule(RulePredicates.ALL_PORTS, OTHER_GROUP, 1, 1, frequencyPeriod)); builder.addRule(Rule.newBlockingRule(RulePredicates.ALL_PORTS, RulePredicates.ALL_USERS)); testParallelThreads(threadCount, builder.build(), false); } @Test public void testUserConnectionFrequencyLimit_Concurrency() { for (final Integer countLimit : Arrays.asList(200, 77, null)) { for (int threadCount : new int[]{7, 12, 27}) { testUserConnectionFrequencyLimit_Concurrency(countLimit, threadCount); } } } private void testUserConnectionFrequencyLimit_Concurrency(Integer countLimit, int threadCount) { if (countLimit != null) { if (countLimit < 3) { countLimit = 3; } if (countLimit < threadCount) { countLimit = threadCount; } } final Duration frequencyPeriod = Duration.ofDays(3650L); final Builder builder = RuleSet.newBuilder(LIMITER_NAME, frequencyPeriod); builder.addRule(Rule.newNonBlockingRule(TEST_PORT, TEST_USER, countLimit, 2, frequencyPeriod)); builder.addRule(Rule.newNonBlockingRule(TEST_PORT, OTHER_USER, 1, 1, frequencyPeriod)); builder.addRule(Rule.newNonBlockingRule(RulePredicates.ALL_PORTS, OTHER_GROUP, 1, 1, frequencyPeriod)); builder.addRule(Rule.newBlockingRule(RulePredicates.ALL_PORTS, RulePredicates.ALL_USERS)); testParallelThreads(threadCount, builder.build(), true); } @Test public void testUserConnectionCountLimit_Concurrency() { for (final Duration duration : Arrays.asList(Duration.ofMinutes(10L), Duration.ofHours(1L), null)) { for 
(final Integer connectionFrequency : Arrays.asList(200, 787, null)) { for (final int threadCount : new int[]{5, 10, 20}) { testUserConnectionCountLimit_Concurrency(duration, connectionFrequency, threadCount); } } } } private void testUserConnectionCountLimit_Concurrency(Duration duration, Integer connectionFrequency, int threadCount) { if (connectionFrequency != null) { if (connectionFrequency < 3) { connectionFrequency = 3; } if (connectionFrequency < threadCount) { connectionFrequency = threadCount; } } final Builder builder = RuleSet.newBuilder(LIMITER_NAME, duration); builder.addRule(Rule.newNonBlockingRule(RulePredicates.ALL_PORTS, TEST_USER, 2, null, duration)); builder.addRule(Rule.newNonBlockingRule(TEST_PORT, TEST_USER, 2, connectionFrequency, duration)); builder.addRule(Rule.newNonBlockingRule(TEST_PORT, OTHER_USER, 1, 1, duration)); builder.addRule(Rule.newNonBlockingRule(RulePredicates.ALL_PORTS, OTHER_GROUP, 1, 1, duration)); builder.addRule(Rule.newBlockingRule(RulePredicates.ALL_PORTS, RulePredicates.ALL_USERS)); testParallelThreads(threadCount, builder.build(), false); } private void testParallelThreads(int threadCount, RuleSet ruleSet, boolean frequencyTest) { assertNotNull(ruleSet); final AtomicReference<ConnectionSlot> connection1 = new AtomicReference<>(); final AtomicReference<ConnectionLimitException> exception1 = new AtomicReference<>(); final Thread thread1 = new Thread(() -> { try { connection1.set(ruleSet.register(newConnection())); } catch (ConnectionLimitException e) { exception1.set(e); } }); final AtomicReference<ConnectionSlot> connection2 = new AtomicReference<>(); final AtomicReference<ConnectionLimitException> exception2 = new AtomicReference<>(); final Thread thread2 = new Thread(() -> { try { connection2.set(ruleSet.register(newConnection())); } catch (ConnectionLimitException e) { exception2.set(e); } }); try { thread1.start(); thread2.start(); thread1.join(300000L); thread2.join(300000L); } catch (InterruptedException e) { 
// Continuation of testParallelThreads(): handler of the InterruptedException thrown
// while joining the two initial registration threads.
// NOTE(review): the current thread's interrupt status is not restored here
// (Thread.currentThread().interrupt()) — tolerable in a test, but worth confirming.
thread1.interrupt();
thread2.interrupt();
return;
}
// Both initial registrations must have succeeded without hitting any limit.
assertNotNull(connection1.get());
assertNull(exception1.get());
assertNotNull(connection2.get());
assertNull(exception2.get());
// With the limit saturated, every additional concurrent registration must be rejected.
int positive = runRegistration(ruleSet, threadCount);
if (positive < 0)
{
    return; // interrupted — abandon the test
}
assertEquals(0, positive);
// Free both slots concurrently, then try registering again.
final Thread deThread1 = new Thread(() -> connection1.get().free());
final Thread deThread2 = new Thread(() -> connection2.get().free());
try
{
    deThread1.start();
    deThread2.start();
    deThread1.join(300000L);
    deThread2.join(300000L);
}
catch (InterruptedException e)
{
    deThread1.interrupt();
    deThread2.interrupt();
    return;
}
positive = runRegistration(ruleSet, threadCount);
if (positive < 0)
{
    return;
}
if (frequencyTest)
{
    // Frequency budget is not replenished by freeing slots: still all rejected.
    assertEquals(0, positive);
}
else
{
    // Count limit is: exactly the two freed slots can be re-taken.
    assertEquals(2, positive);
}
}

/**
 * Registers {@code threadCount} connections in parallel against the rule set.
 *
 * @return the number of successful registrations, or -1 if interrupted
 */
private int runRegistration(RuleSet ruleSet, int threadCount)
{
    final AtomicInteger positive = new AtomicInteger(threadCount);
    final Thread[] threads = new Thread[threadCount];
    for (int i = 0; i < threads.length; i++)
    {
        threads[i] = new Thread(() -> {
            try
            {
                ruleSet.register(newConnection());
            }
            catch (ConnectionLimitException e)
            {
                positive.decrementAndGet(); // this attempt was rejected by a limit
            }
        });
    }
    try
    {
        Arrays.stream(threads).forEach(Thread::start);
        for (final Thread thread : threads)
        {
            thread.join(300000L);
        }
    }
    catch (InterruptedException e)
    {
        // NOTE(review): interrupt status not restored here either — confirm intent.
        Arrays.stream(threads).forEach(Thread::interrupt);
        return -1;
    }
    return positive.get();
}

/** Exercises the user connection-count limit across duration/frequency combinations. */
@Test
public void testUserConnectionCountLimit()
{
    for (final Duration duration : Arrays.asList(Duration.ofMinutes(11L), Duration.ofDays(1L), null))
    {
        for (final Integer frequencyLimit : Arrays.asList(211, null, 45))
        {
            testUserConnectionCountLimit(duration, frequencyLimit);
        }
    }
}

private void testUserConnectionCountLimit(Duration duration, Integer frequencyLimit)
{
    // Keep the frequency limit high enough that only the count limit (2) can trigger.
    if (frequencyLimit != null && frequencyLimit < 3)
    {
        frequencyLimit = 3;
    }
    final Builder builder = RuleSet.newBuilder(LIMITER_NAME, duration);
    builder.addRule(Rule.newNonBlockingRule(
        RulePredicates.ALL_PORTS, TEST_USER, 2, null, duration));
builder.addRule(Rule.newNonBlockingRule(TEST_PORT, TEST_USER, 2, frequencyLimit, duration)); builder.addRule(Rule.newNonBlockingRule(TEST_PORT, OTHER_USER, 1, 1, duration)); builder.addRule(Rule.newNonBlockingRule(RulePredicates.ALL_PORTS, OTHER_GROUP, 1, 1, duration)); builder.addRule(Rule.newBlockingRule(RulePredicates.ALL_PORTS, RulePredicates.ALL_USERS)); testConnectionCountLimit2(builder.build()); testConnectionCountLimit2(builder.logAllMessages(true).build()); testConnectionCountLimit2(builder.logAllMessages(false).build()); } @Test public void testGroupConnectionCountLimit() { for (final Duration duration : Arrays.asList(Duration.ofMinutes(11L), Duration.ofDays(2L), null)) { for (final Integer frequencyLimit : Arrays.asList(217, null, 47)) { testGroupConnectionCountLimit(duration, frequencyLimit); } } } private void testGroupConnectionCountLimit(Duration duration, Integer frequencyLimit) { if (frequencyLimit != null && frequencyLimit < 3) { frequencyLimit = 3; } final Builder builder = RuleSet.newBuilder(LIMITER_NAME, duration); builder.addRule(Rule.newNonBlockingRule(TEST_PORT, TEST_GROUP2, 1000, frequencyLimit, duration)); builder.addRule(Rule.newNonBlockingRule(TEST_PORT, TEST_GROUP1, 2, null, duration)); builder.addRule(Rule.newNonBlockingRule(TEST_PORT, OTHER_USER, 1, 1, duration)); builder.addRule(Rule.newNonBlockingRule(RulePredicates.ALL_PORTS, OTHER_GROUP, 1, 1, duration)); builder.addRule(Rule.newBlockingRule(RulePredicates.ALL_PORTS, RulePredicates.ALL_USERS)); testConnectionCountLimit2(builder.build()); testConnectionCountLimit2(builder.logAllMessages(true).build()); testConnectionCountLimit2(builder.logAllMessages(false).build()); } @Test public void testDefaultConnectionCountLimit() { for (final Duration duration : Arrays.asList(Duration.ofMinutes(11L), Duration.ofDays(12L), null)) { for (final Integer frequencyLimit : Arrays.asList(117, null, 147)) { testDefaultConnectionCountLimit(duration, frequencyLimit); } } } private void 
testDefaultConnectionCountLimit(Duration duration, Integer frequencyLimit) { if (frequencyLimit != null && frequencyLimit < 3) { frequencyLimit = 3; } final Builder builder = RuleSet.newBuilder(LIMITER_NAME, duration); builder.addRule(Rule.newNonBlockingRule(RulePredicates.ALL_PORTS, TEST_USER, 2, frequencyLimit, duration)); builder.addRule(Rule.newNonBlockingRule(TEST_PORT, OTHER_USER, 1, 1, duration)); builder.addRule(Rule.newNonBlockingRule(RulePredicates.ALL_PORTS, OTHER_GROUP, 1, 1, duration)); builder.addRule(Rule.newNonBlockingRule(RulePredicates.ALL_PORTS, RulePredicates.ALL_USERS, 2, null, duration)); testConnectionCountLimit2(builder.build()); testConnectionCountLimit2(builder.logAllMessages(true).build()); testConnectionCountLimit2(builder.logAllMessages(false).build()); } private void testConnectionCountLimit2(RuleSet ruleSet) { assertNotNull(ruleSet); ConnectionSlot connection1 = null; ConnectionSlot connection2 = null; try { connection1 = ruleSet.register(newConnection()); connection2 = ruleSet.register(newConnection()); } catch (ConnectionLimitException e) { fail("No exception is expected: " + e.getMessage()); } assertNotNull(connection1); assertNotNull(connection2); try { ruleSet.register(newConnection()); fail("An exception is expected"); } catch (ConnectionLimitException e) { assertEquals("User user breaks connection count limit 2 on port amqp", e.getMessage()); } connection1.free(); ConnectionSlot connection3 = null; try { connection3 = ruleSet.register(newConnection()); } catch (ConnectionLimitException e) { fail("No exception is expected: " + e.getMessage()); } assertNotNull(connection3); connection2.free(); connection3.free(); } @Test public void testBlockedUser() { for (final Duration duration : Arrays.asList(Duration.ofMinutes(11L), Duration.ofDays(1L), null)) { testBlockedUser(duration); } } private void testBlockedUser(Duration duration) { final Builder builder = RuleSet.newBuilder(LIMITER_NAME, duration); 
builder.addRule(Rule.newBlockingRule(TEST_PORT, TEST_USER)); builder.addRule(Rule.newNonBlockingRule(TEST_PORT, OTHER_USER, 1000, 1000, duration)); builder.addRule(Rule.newNonBlockingRule(RulePredicates.ALL_PORTS, OTHER_GROUP, 1000, 1000, duration)); builder.addRule(Rule.newNonBlockingRule(RulePredicates.ALL_PORTS, RulePredicates.ALL_USERS, 1000, 1000, duration)); testBlocked(builder.build()); testBlocked(builder.logAllMessages(true).build()); testBlocked(builder.logAllMessages(false).build()); } @Test public void testBlockedGroup() { for (final Duration duration : Arrays.asList(Duration.ofMinutes(11L), Duration.ofDays(1L), null)) { testBlockedGroup(duration); } } private void testBlockedGroup(Duration duration) { final Builder builder = RuleSet.newBuilder(LIMITER_NAME, duration); builder.addRule(Rule.newNonBlockingRule(TEST_PORT, TEST_GROUP1, 10000, null, duration)); builder.addRule(Rule.newBlockingRule(TEST_PORT, TEST_GROUP2)); builder.addRule(Rule.newNonBlockingRule(TEST_PORT, OTHER_USER, 1000, 1000, duration)); builder.addRule(Rule.newNonBlockingRule(RulePredicates.ALL_PORTS, OTHER_GROUP, 1000, 1000, duration)); builder.addRule(Rule.newNonBlockingRule(RulePredicates.ALL_PORTS, RulePredicates.ALL_USERS, 10000, 10000, duration)); testBlocked(builder.build()); testBlocked(builder.logAllMessages(true).build()); testBlocked(builder.logAllMessages(false).build()); } @Test public void testBlockedByDefault() { for (final Duration duration : Arrays.asList(Duration.ofMinutes(11L), Duration.ofDays(1L), null)) { testBlockedByDefault(duration); } } private void testBlockedByDefault(Duration duration) { final Builder builder = RuleSet.newBuilder(LIMITER_NAME, duration); builder.addRule(Rule.newBlockingRule(TEST_PORT, RulePredicates.ALL_USERS)); builder.addRule(Rule.newNonBlockingRule(RulePredicates.ALL_PORTS, RulePredicates.ALL_USERS, 10000, 10000, duration)); builder.addRule(Rule.newNonBlockingRule(TEST_PORT, OTHER_USER, 1000, 1000, duration)); 
builder.addRule(Rule.newNonBlockingRule(RulePredicates.ALL_PORTS, OTHER_GROUP, 1000, 1000, duration)); testBlocked(builder.build()); testBlocked(builder.logAllMessages(true).build()); testBlocked(builder.logAllMessages(false).build()); } private void testBlocked(RuleSet ruleSet) { assertNotNull(ruleSet); ConnectionSlot connection = null; try { connection = ruleSet.register(newConnection()); fail("An exception is expected"); } catch (ConnectionLimitException e) { assertEquals("User user is blocked on port amqp", e.getMessage()); } assertNull(connection); } @Test public void testUserConnectionFrequencyLimit() { for (final Integer countLimit : Arrays.asList(300, 200, null)) { testUserConnectionFrequencyLimit(countLimit); } } private void testUserConnectionFrequencyLimit(Integer countLimit) { if (countLimit != null && countLimit < 3) { countLimit = 3; } final Duration frequencyPeriod = Duration.ofDays(3650L); final Builder builder = RuleSet.newBuilder(LIMITER_NAME, frequencyPeriod); builder.addRule(Rule.newNonBlockingRule(TEST_PORT, TEST_USER, countLimit, 2, frequencyPeriod)); builder.addRule(Rule.newNonBlockingRule(TEST_PORT, OTHER_USER, 1, 1, frequencyPeriod)); builder.addRule(Rule.newNonBlockingRule(RulePredicates.ALL_PORTS, OTHER_GROUP, 1, 1, frequencyPeriod)); builder.addRule(Rule.newBlockingRule(RulePredicates.ALL_PORTS, RulePredicates.ALL_USERS)); testConnectionFrequencyLimit2(builder.build()); testConnectionFrequencyLimit2(builder.logAllMessages(true).build()); testConnectionFrequencyLimit2(builder.logAllMessages(false).build()); } @Test public void testGroupConnectionFrequencyLimit() { for (final Integer countLimit : Arrays.asList(300, 200, null)) { testGroupConnectionFrequencyLimit(countLimit); } } private void testGroupConnectionFrequencyLimit(Integer countLimit) { if (countLimit != null && countLimit < 3) { countLimit = 3; } final Duration frequencyPeriod = Duration.ofDays(3650L); final Builder builder = RuleSet.newBuilder(LIMITER_NAME, frequencyPeriod); 
builder.addRule(Rule.newNonBlockingRule(TEST_PORT, TEST_GROUP2, countLimit, 1000, frequencyPeriod)); builder.addRule(Rule.newNonBlockingRule(TEST_PORT, TEST_GROUP1, null, 2, frequencyPeriod)); builder.addRule(Rule.newNonBlockingRule(TEST_PORT, OTHER_USER, 1, 1, frequencyPeriod)); builder.addRule(Rule.newNonBlockingRule(RulePredicates.ALL_PORTS, OTHER_GROUP, 1, 1, frequencyPeriod)); builder.addRule(Rule.newBlockingRule(RulePredicates.ALL_PORTS, RulePredicates.ALL_USERS)); final RuleSet ruleSet = builder.build(); testConnectionFrequencyLimit2(ruleSet); } @Test public void testDefaultConnectionFrequencyLimit() { for (final Integer countLimit : Arrays.asList(300, 200, null)) { testDefaultConnectionFrequencyLimit(countLimit); } } private void testDefaultConnectionFrequencyLimit(Integer countLimit) { if (countLimit != null && countLimit < 3) { countLimit = 3; } final Duration frequencyPeriod = Duration.ofDays(3650L); final Builder builder = RuleSet.newBuilder(LIMITER_NAME, frequencyPeriod); builder.addRule(Rule.newNonBlockingRule(TEST_PORT, RulePredicates.ALL_USERS, null, 2, frequencyPeriod)); builder.addRule(Rule.newNonBlockingRule(RulePredicates.ALL_PORTS, RulePredicates.ALL_USERS, countLimit, 2, frequencyPeriod)); builder.addRule(Rule.newNonBlockingRule(TEST_PORT, OTHER_USER, 1, 1, frequencyPeriod)); builder.addRule(Rule.newNonBlockingRule(RulePredicates.ALL_PORTS, OTHER_GROUP, 1, 1, frequencyPeriod)); final RuleSet ruleSet = builder.build(); testConnectionFrequencyLimit2(ruleSet); } private void testConnectionFrequencyLimit2(RuleSet ruleSet) { assertNotNull(ruleSet); ConnectionSlot connection1 = null; ConnectionSlot connection2 = null; try { connection1 = ruleSet.register(newConnection()); connection2 = ruleSet.register(newConnection()); } catch (ConnectionLimitException e) { fail("An exception is not expected"); } assertNotNull(connection1); assertNotNull(connection2); try { ruleSet.register(newConnection()); fail("An exception is expected here"); } catch 
(ConnectionLimitException e) { assertTrue(Pattern.matches("User user breaks connection frequency limit 2 per \\d+ s on port amqp", e.getMessage())); } connection1.free(); connection2.free(); try { ruleSet.register(newConnection()); fail("An exception is expected here"); } catch (ConnectionLimitException e) { assertTrue(Pattern.matches("User user breaks connection frequency limit 2 per \\d+ s on port amqp", e.getMessage())); } } @Test public void testNoLimits() { for (final Duration duration : Arrays.asList(null, Duration.ofNanos(1L), Duration.ofMillis(1L), Duration.ofMinutes(1L), Duration.ofDays(1L))) { testNoLimits(duration); } } private void testNoLimits(Duration duration) { final Builder builder = RuleSet.newBuilder(LIMITER_NAME, duration); builder.addRule(Rule.newBlockingRule(TEST_PORT, OTHER_USER)); builder.addRule(Rule.newBlockingRule(RulePredicates.ALL_PORTS, OTHER_GROUP)); final RuleSet ruleSet = builder.build(); assertNotNull(ruleSet); ConnectionSlot connection1 = null; ConnectionSlot connection2 = null; ConnectionSlot connection3 = null; try { connection1 = ruleSet.register(newConnection()); connection2 = ruleSet.register(newConnection()); connection3 = ruleSet.register(newConnection()); } catch (ConnectionLimitException e) { fail("An exception is not expected here"); } assertNotNull(connection1); assertNotNull(connection2); assertNotNull(connection3); connection1.free(); connection2.free(); connection3.free(); } @Test public void testRegisterNullUser() { for (final Duration duration : Arrays.asList(null, Duration.ofMinutes(1L), Duration.ofDays(1L))) { testRegisterNullUser(duration); } } private void testRegisterNullUser(Duration duration) { final Builder builder = RuleSet.newBuilder(LIMITER_NAME, duration); builder.addRule(Rule.newBlockingRule(RulePredicates.ALL_PORTS, RulePredicates.ALL_USERS)); final RuleSet ruleSet = builder.build(); assertNotNull(ruleSet); final AMQPConnection<?> connection = Mockito.mock(AMQPConnection.class); 
Mockito.doReturn(_port).when(connection).getPort(); Mockito.doReturn(_subject).when(connection).getSubject(); Mockito.doReturn(_eventLogger).when(connection).getEventLogger(); try { ruleSet.register(connection); fail("An exception is expected"); } catch (ConnectionLimitException e) { assertEquals("Unauthorized connection is forbidden", e.getMessage()); } } @Test public void testRegisterNullSubject() { for (final Duration duration : Arrays.asList(null, Duration.ofMinutes(1L), Duration.ofDays(1L))) { testRegisterNullSubject(duration); } } private void testRegisterNullSubject(Duration duration) { final Builder builder = RuleSet.newBuilder(LIMITER_NAME, duration); builder.addRule(Rule.newNonBlockingRule(RulePredicates.ALL_PORTS, TEST_GROUP1, 1000, 1000, duration)); builder.addRule(Rule.newNonBlockingRule(RulePredicates.ALL_PORTS, TEST_GROUP2, 1000, 1000, duration)); builder.addRule(Rule.newBlockingRule(RulePredicates.ALL_PORTS, RulePredicates.ALL_USERS)); final RuleSet ruleSet = builder.build(); assertNotNull(ruleSet); final AMQPConnection<?> connection = Mockito.mock(AMQPConnection.class); Mockito.doReturn(_port).when(connection).getPort(); Mockito.doReturn(_principal).when(connection).getAuthorizedPrincipal(); Mockito.doReturn(_eventLogger).when(connection).getEventLogger(); try { ruleSet.register(connection); fail("An exception is expected"); } catch (ConnectionLimitException e) { assertEquals("User user is blocked on port amqp", e.getMessage()); } } private AMQPConnection<?> newConnection() { final AMQPConnection<?> connection = Mockito.mock(AMQPConnection.class); Mockito.doReturn(_port).when(connection).getPort(); Mockito.doReturn(_subject).when(connection).getSubject(); Mockito.doReturn(_principal).when(connection).getAuthorizedPrincipal(); Mockito.doReturn(_eventLogger).when(connection).getEventLogger(); return connection; } @Test public void testBuilder_AddNull() { final Builder builder = RuleSet.newBuilder(LIMITER_NAME, Duration.ofMillis(2L)); 
builder.addRule(null); builder.addRules(null); final RuleSet ruleSet = builder.build(); assertNotNull(ruleSet); final ConnectionSlot connection1 = ruleSet.register(newConnection()); assertNotNull(connection1); final ConnectionSlot connection2 = ruleSet.register(newConnection()); assertNotNull(connection2); connection1.free(); connection2.free(); } @Test public void testAppend_CountLimit() { for (final Duration duration : Arrays.asList(Duration.ofMinutes(11L), Duration.ofDays(1L), null)) { for (final Integer frequencyLimit : Arrays.asList(211, null, 45)) { testAppend_CountLimit(duration, frequencyLimit); } } } private void testAppend_CountLimit(Duration duration, Integer frequencyLimit) { if (frequencyLimit != null && frequencyLimit < 3) { frequencyLimit = 3; } final Builder builder = RuleSet.newBuilder(LIMITER_NAME, duration); builder.addRule(Rule.newNonBlockingRule(RulePredicates.ALL_PORTS, TEST_USER, 200, 20000, duration)); builder.addRule(Rule.newNonBlockingRule(TEST_PORT, OTHER_USER, 1, 1, duration)); builder.addRule(Rule.newBlockingRule(RulePredicates.ALL_PORTS, RulePredicates.ALL_USERS)); final Builder secondaryBuilder = RuleSet.newBuilder(LIMITER_NAME, duration); secondaryBuilder.addRule(Rule.newNonBlockingRule(TEST_PORT, TEST_USER, 2, frequencyLimit, duration)); secondaryBuilder.addRule(Rule.newBlockingRule(RulePredicates.ALL_PORTS, RulePredicates.ALL_USERS)); testConnectionCountLimit2((RuleSet) builder.build().append(secondaryBuilder.build())); } @Test public void testAppend_FrequencyLimit() { for (final Integer countLimit : Arrays.asList(300, 200, null)) { testAppend_FrequencyLimit(countLimit); } } private void testAppend_FrequencyLimit(Integer countLimit) { if (countLimit != null && countLimit < 3) { countLimit = 3; } final Duration frequencyPeriod = Duration.ofDays(3650L); final Builder builder = RuleSet.newBuilder(LIMITER_NAME, frequencyPeriod); builder.addRule(Rule.newNonBlockingRule(RulePredicates.ALL_PORTS, TEST_USER, 200, 20000, frequencyPeriod)); 
builder.addRule(Rule.newNonBlockingRule(TEST_PORT, OTHER_USER, 1, 1, frequencyPeriod)); builder.addRule(Rule.newBlockingRule(RulePredicates.ALL_PORTS, RulePredicates.ALL_USERS)); final Builder secondaryBuilder = RuleSet.newBuilder(LIMITER_NAME, frequencyPeriod); secondaryBuilder.addRule(Rule.newNonBlockingRule(TEST_PORT, TEST_USER, countLimit, 2, frequencyPeriod)); secondaryBuilder.addRule(Rule.newBlockingRule(RulePredicates.ALL_PORTS, RulePredicates.ALL_USERS)); testConnectionFrequencyLimit2((RuleSet) builder.build().append(secondaryBuilder.build())); } @Test public void testAppend_BlockedUser() { for (final Duration duration : Arrays.asList(Duration.ofMinutes(11L), Duration.ofDays(1L), null)) { testAppend_BlockedUser(duration); } } private void testAppend_BlockedUser(Duration duration) { final Builder builder = RuleSet.newBuilder(LIMITER_NAME, duration); builder.addRule(Rule.newNonBlockingRule(TEST_PORT, TEST_USER, 1000, 1000, duration)); final Builder secondaryBuilder = RuleSet.newBuilder(LIMITER_NAME, Duration.ofDays(1L)); secondaryBuilder.addRule(Rule.newBlockingRule(RulePredicates.ALL_PORTS, RulePredicates.ALL_USERS)); testBlocked((RuleSet) builder.build().append(secondaryBuilder.build())); } @Test public void testName() { final RuleSet ruleSet = RuleSet.newBuilder(LIMITER_NAME, Duration.ofMinutes(1L)).build(); assertEquals(LIMITER_NAME, ruleSet.toString()); } @Test public void testInvalidRuleWithoutPeriod() { RuleSet.Builder builder = RuleSet.newBuilder(LIMITER_NAME, Duration.ofMinutes(1L)); final NonBlockingRule rule = Rule.newNonBlockingRule( RulePredicates.ALL_PORTS, RulePredicates.ALL_USERS, 2, 2, null); try { builder.addRule(rule); fail("An exception is expected, the rule is not valid."); } catch (IllegalArgumentException e) { assertNotNull(e.getMessage()); } try { builder.addRules(Collections.singletonList(rule)); fail("An exception is expected, the rule is not valid."); } catch (IllegalArgumentException e) { assertNotNull(e.getMessage()); } } }
googleapis/google-cloud-java
36,629
java-datacatalog/proto-google-cloud-datacatalog-v1beta1/src/main/java/com/google/cloud/datacatalog/v1beta1/ExportTaxonomiesRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/datacatalog/v1beta1/policytagmanagerserialization.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.datacatalog.v1beta1; /** * * * <pre> * Request message for * [ExportTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ExportTaxonomies]. * </pre> * * Protobuf type {@code google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest} */ public final class ExportTaxonomiesRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest) ExportTaxonomiesRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ExportTaxonomiesRequest.newBuilder() to construct. 
private ExportTaxonomiesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ExportTaxonomiesRequest() { parent_ = ""; taxonomies_ = com.google.protobuf.LazyStringArrayList.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ExportTaxonomiesRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.datacatalog.v1beta1.PolicyTagManagerSerializationProto .internal_static_google_cloud_datacatalog_v1beta1_ExportTaxonomiesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.datacatalog.v1beta1.PolicyTagManagerSerializationProto .internal_static_google_cloud_datacatalog_v1beta1_ExportTaxonomiesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest.class, com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest.Builder.class); } private int destinationCase_ = 0; @SuppressWarnings("serial") private java.lang.Object destination_; public enum DestinationCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { SERIALIZED_TAXONOMIES(3), DESTINATION_NOT_SET(0); private final int value; private DestinationCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. 
*/ @java.lang.Deprecated public static DestinationCase valueOf(int value) { return forNumber(value); } public static DestinationCase forNumber(int value) { switch (value) { case 3: return SERIALIZED_TAXONOMIES; case 0: return DESTINATION_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public DestinationCase getDestinationCase() { return DestinationCase.forNumber(destinationCase_); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. Resource name of the project that taxonomies to be exported * will share. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. Resource name of the project that taxonomies to be exported * will share. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int TAXONOMIES_FIELD_NUMBER = 2; @SuppressWarnings("serial") private com.google.protobuf.LazyStringArrayList taxonomies_ = com.google.protobuf.LazyStringArrayList.emptyList(); /** * * * <pre> * Required. 
Resource names of the taxonomies to be exported. * </pre> * * <code> * repeated string taxonomies = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return A list containing the taxonomies. */ public com.google.protobuf.ProtocolStringList getTaxonomiesList() { return taxonomies_; } /** * * * <pre> * Required. Resource names of the taxonomies to be exported. * </pre> * * <code> * repeated string taxonomies = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The count of taxonomies. */ public int getTaxonomiesCount() { return taxonomies_.size(); } /** * * * <pre> * Required. Resource names of the taxonomies to be exported. * </pre> * * <code> * repeated string taxonomies = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param index The index of the element to return. * @return The taxonomies at the given index. */ public java.lang.String getTaxonomies(int index) { return taxonomies_.get(index); } /** * * * <pre> * Required. Resource names of the taxonomies to be exported. * </pre> * * <code> * repeated string taxonomies = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param index The index of the value to return. * @return The bytes of the taxonomies at the given index. */ public com.google.protobuf.ByteString getTaxonomiesBytes(int index) { return taxonomies_.getByteString(index); } public static final int SERIALIZED_TAXONOMIES_FIELD_NUMBER = 3; /** * * * <pre> * Export taxonomies as serialized taxonomies. * </pre> * * <code>bool serialized_taxonomies = 3;</code> * * @return Whether the serializedTaxonomies field is set. */ @java.lang.Override public boolean hasSerializedTaxonomies() { return destinationCase_ == 3; } /** * * * <pre> * Export taxonomies as serialized taxonomies. 
* </pre> * * <code>bool serialized_taxonomies = 3;</code> * * @return The serializedTaxonomies. */ @java.lang.Override public boolean getSerializedTaxonomies() { if (destinationCase_ == 3) { return (java.lang.Boolean) destination_; } return false; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } for (int i = 0; i < taxonomies_.size(); i++) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, taxonomies_.getRaw(i)); } if (destinationCase_ == 3) { output.writeBool(3, (boolean) ((java.lang.Boolean) destination_)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } { int dataSize = 0; for (int i = 0; i < taxonomies_.size(); i++) { dataSize += computeStringSizeNoTag(taxonomies_.getRaw(i)); } size += dataSize; size += 1 * getTaxonomiesList().size(); } if (destinationCase_ == 3) { size += com.google.protobuf.CodedOutputStream.computeBoolSize( 3, (boolean) ((java.lang.Boolean) destination_)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest)) { return super.equals(obj); } 
com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest other = (com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest) obj; if (!getParent().equals(other.getParent())) return false; if (!getTaxonomiesList().equals(other.getTaxonomiesList())) return false; if (!getDestinationCase().equals(other.getDestinationCase())) return false; switch (destinationCase_) { case 3: if (getSerializedTaxonomies() != other.getSerializedTaxonomies()) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); if (getTaxonomiesCount() > 0) { hash = (37 * hash) + TAXONOMIES_FIELD_NUMBER; hash = (53 * hash) + getTaxonomiesList().hashCode(); } switch (destinationCase_) { case 3: hash = (37 * hash) + SERIALIZED_TAXONOMIES_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getSerializedTaxonomies()); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest parseFrom( com.google.protobuf.CodedInputStream input) throws 
java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for * [ExportTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ExportTaxonomies]. 
* </pre> * * Protobuf type {@code google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest) com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.datacatalog.v1beta1.PolicyTagManagerSerializationProto .internal_static_google_cloud_datacatalog_v1beta1_ExportTaxonomiesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.datacatalog.v1beta1.PolicyTagManagerSerializationProto .internal_static_google_cloud_datacatalog_v1beta1_ExportTaxonomiesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest.class, com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest.Builder.class); } // Construct using com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; taxonomies_ = com.google.protobuf.LazyStringArrayList.emptyList(); destinationCase_ = 0; destination_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.datacatalog.v1beta1.PolicyTagManagerSerializationProto .internal_static_google_cloud_datacatalog_v1beta1_ExportTaxonomiesRequest_descriptor; } @java.lang.Override public com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest getDefaultInstanceForType() { return com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest.getDefaultInstance(); 
} @java.lang.Override public com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest build() { com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest buildPartial() { com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest result = new com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0( com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { taxonomies_.makeImmutable(); result.taxonomies_ = taxonomies_; } } private void buildPartialOneofs( com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest result) { result.destinationCase_ = destinationCase_; result.destination_ = this.destination_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return 
super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest) { return mergeFrom((com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest other) { if (other == com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (!other.taxonomies_.isEmpty()) { if (taxonomies_.isEmpty()) { taxonomies_ = other.taxonomies_; bitField0_ |= 0x00000002; } else { ensureTaxonomiesIsMutable(); taxonomies_.addAll(other.taxonomies_); } onChanged(); } switch (other.getDestinationCase()) { case SERIALIZED_TAXONOMIES: { setSerializedTaxonomies(other.getSerializedTaxonomies()); break; } case DESTINATION_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { java.lang.String s = input.readStringRequireUtf8(); ensureTaxonomiesIsMutable(); taxonomies_.add(s); break; } // case 18 case 24: { destination_ = input.readBool(); destinationCase_ = 3; break; } // case 24 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } 
break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int destinationCase_ = 0; private java.lang.Object destination_; public DestinationCase getDestinationCase() { return DestinationCase.forNumber(destinationCase_); } public Builder clearDestination() { destinationCase_ = 0; destination_ = null; onChanged(); return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. Resource name of the project that taxonomies to be exported * will share. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Resource name of the project that taxonomies to be exported * will share. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. Resource name of the project that taxonomies to be exported * will share. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. 
*/ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Resource name of the project that taxonomies to be exported * will share. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. Resource name of the project that taxonomies to be exported * will share. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.protobuf.LazyStringArrayList taxonomies_ = com.google.protobuf.LazyStringArrayList.emptyList(); private void ensureTaxonomiesIsMutable() { if (!taxonomies_.isModifiable()) { taxonomies_ = new com.google.protobuf.LazyStringArrayList(taxonomies_); } bitField0_ |= 0x00000002; } /** * * * <pre> * Required. Resource names of the taxonomies to be exported. * </pre> * * <code> * repeated string taxonomies = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return A list containing the taxonomies. */ public com.google.protobuf.ProtocolStringList getTaxonomiesList() { taxonomies_.makeImmutable(); return taxonomies_; } /** * * * <pre> * Required. Resource names of the taxonomies to be exported. 
* </pre> * * <code> * repeated string taxonomies = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The count of taxonomies. */ public int getTaxonomiesCount() { return taxonomies_.size(); } /** * * * <pre> * Required. Resource names of the taxonomies to be exported. * </pre> * * <code> * repeated string taxonomies = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param index The index of the element to return. * @return The taxonomies at the given index. */ public java.lang.String getTaxonomies(int index) { return taxonomies_.get(index); } /** * * * <pre> * Required. Resource names of the taxonomies to be exported. * </pre> * * <code> * repeated string taxonomies = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param index The index of the value to return. * @return The bytes of the taxonomies at the given index. */ public com.google.protobuf.ByteString getTaxonomiesBytes(int index) { return taxonomies_.getByteString(index); } /** * * * <pre> * Required. Resource names of the taxonomies to be exported. * </pre> * * <code> * repeated string taxonomies = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param index The index to set the value at. * @param value The taxonomies to set. * @return This builder for chaining. */ public Builder setTaxonomies(int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureTaxonomiesIsMutable(); taxonomies_.set(index, value); bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Resource names of the taxonomies to be exported. * </pre> * * <code> * repeated string taxonomies = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The taxonomies to add. 
* @return This builder for chaining. */ public Builder addTaxonomies(java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureTaxonomiesIsMutable(); taxonomies_.add(value); bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Resource names of the taxonomies to be exported. * </pre> * * <code> * repeated string taxonomies = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param values The taxonomies to add. * @return This builder for chaining. */ public Builder addAllTaxonomies(java.lang.Iterable<java.lang.String> values) { ensureTaxonomiesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, taxonomies_); bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Resource names of the taxonomies to be exported. * </pre> * * <code> * repeated string taxonomies = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearTaxonomies() { taxonomies_ = com.google.protobuf.LazyStringArrayList.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); ; onChanged(); return this; } /** * * * <pre> * Required. Resource names of the taxonomies to be exported. * </pre> * * <code> * repeated string taxonomies = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes of the taxonomies to add. * @return This builder for chaining. */ public Builder addTaxonomiesBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); ensureTaxonomiesIsMutable(); taxonomies_.add(value); bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Export taxonomies as serialized taxonomies. 
* </pre> * * <code>bool serialized_taxonomies = 3;</code> * * @return Whether the serializedTaxonomies field is set. */ public boolean hasSerializedTaxonomies() { return destinationCase_ == 3; } /** * * * <pre> * Export taxonomies as serialized taxonomies. * </pre> * * <code>bool serialized_taxonomies = 3;</code> * * @return The serializedTaxonomies. */ public boolean getSerializedTaxonomies() { if (destinationCase_ == 3) { return (java.lang.Boolean) destination_; } return false; } /** * * * <pre> * Export taxonomies as serialized taxonomies. * </pre> * * <code>bool serialized_taxonomies = 3;</code> * * @param value The serializedTaxonomies to set. * @return This builder for chaining. */ public Builder setSerializedTaxonomies(boolean value) { destinationCase_ = 3; destination_ = value; onChanged(); return this; } /** * * * <pre> * Export taxonomies as serialized taxonomies. * </pre> * * <code>bool serialized_taxonomies = 3;</code> * * @return This builder for chaining. */ public Builder clearSerializedTaxonomies() { if (destinationCase_ == 3) { destinationCase_ = 0; destination_ = null; onChanged(); } return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest) } // @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest) private static final com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest(); } public static com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private 
static final com.google.protobuf.Parser<ExportTaxonomiesRequest> PARSER = new com.google.protobuf.AbstractParser<ExportTaxonomiesRequest>() { @java.lang.Override public ExportTaxonomiesRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ExportTaxonomiesRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ExportTaxonomiesRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.datacatalog.v1beta1.ExportTaxonomiesRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,670
java-apigee-connect/proto-google-cloud-apigee-connect-v1/src/main/java/com/google/cloud/apigeeconnect/v1/ListConnectionsResponse.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/apigeeconnect/v1/connection.proto

// Protobuf Java Version: 3.25.8
package com.google.cloud.apigeeconnect.v1;

/**
 *
 *
 * <pre>
 * The response for
 * [ListConnections][Management.ListConnections].
 * </pre>
 *
 * Protobuf type {@code google.cloud.apigeeconnect.v1.ListConnectionsResponse}
 */
public final class ListConnectionsResponse extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.apigeeconnect.v1.ListConnectionsResponse)
    ListConnectionsResponseOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ListConnectionsResponse.newBuilder() to construct.
  private ListConnectionsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default-instance constructor: initializes fields to proto3 defaults
  // (empty repeated field, empty string).
  private ListConnectionsResponse() {
    connections_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListConnectionsResponse();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.apigeeconnect.v1.ConnectionProto
        .internal_static_google_cloud_apigeeconnect_v1_ListConnectionsResponse_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.apigeeconnect.v1.ConnectionProto
        .internal_static_google_cloud_apigeeconnect_v1_ListConnectionsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.apigeeconnect.v1.ListConnectionsResponse.class,
            com.google.cloud.apigeeconnect.v1.ListConnectionsResponse.Builder.class);
  }

  public static final int CONNECTIONS_FIELD_NUMBER = 1;

  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.apigeeconnect.v1.Connection> connections_;

  /**
   *
   *
   * <pre>
   * A list of clients.
   * </pre>
   *
   * <code>repeated .google.cloud.apigeeconnect.v1.Connection connections = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.apigeeconnect.v1.Connection> getConnectionsList() {
    return connections_;
  }

  /**
   *
   *
   * <pre>
   * A list of clients.
   * </pre>
   *
   * <code>repeated .google.cloud.apigeeconnect.v1.Connection connections = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.apigeeconnect.v1.ConnectionOrBuilder>
      getConnectionsOrBuilderList() {
    return connections_;
  }

  /**
   *
   *
   * <pre>
   * A list of clients.
   * </pre>
   *
   * <code>repeated .google.cloud.apigeeconnect.v1.Connection connections = 1;</code>
   */
  @java.lang.Override
  public int getConnectionsCount() {
    return connections_.size();
  }

  /**
   *
   *
   * <pre>
   * A list of clients.
   * </pre>
   *
   * <code>repeated .google.cloud.apigeeconnect.v1.Connection connections = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.apigeeconnect.v1.Connection getConnections(int index) {
    return connections_.get(index);
  }

  /**
   *
   *
   * <pre>
   * A list of clients.
   * </pre>
   *
   * <code>repeated .google.cloud.apigeeconnect.v1.Connection connections = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.apigeeconnect.v1.ConnectionOrBuilder getConnectionsOrBuilder(int index) {
    return connections_.get(index);
  }

  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

  // Stored as Object: may hold either a String or a ByteString; the accessors
  // below lazily convert and cache the String form on first access.
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";

  /**
   *
   *
   * <pre>
   * A token that can be sent as `page_token` to retrieve the next page.
   * If this field is omitted, there are no subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so subsequent calls skip the UTF-8 decode.
      nextPageToken_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * A token that can be sent as `page_token` to retrieve the next page.
   * If this field is omitted, there are no subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // Cached result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < connections_.size(); i++) {
      output.writeMessage(1, connections_.get(i));
    }
    // proto3: an empty string is the default value and is not serialized.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the computed size; -1 means not yet computed.
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    for (int i = 0; i < connections_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, connections_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.apigeeconnect.v1.ListConnectionsResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.apigeeconnect.v1.ListConnectionsResponse other =
        (com.google.cloud.apigeeconnect.v1.ListConnectionsResponse) obj;

    if (!getConnectionsList().equals(other.getConnectionsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getConnectionsCount() > 0) {
      hash = (37 * hash) + CONNECTIONS_FIELD_NUMBER;
      hash = (53 * hash) + getConnectionsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.apigeeconnect.v1.ListConnectionsResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.apigeeconnect.v1.ListConnectionsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.apigeeconnect.v1.ListConnectionsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.apigeeconnect.v1.ListConnectionsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.apigeeconnect.v1.ListConnectionsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.apigeeconnect.v1.ListConnectionsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.apigeeconnect.v1.ListConnectionsResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.apigeeconnect.v1.ListConnectionsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.apigeeconnect.v1.ListConnectionsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.apigeeconnect.v1.ListConnectionsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.apigeeconnect.v1.ListConnectionsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.apigeeconnect.v1.ListConnectionsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.apigeeconnect.v1.ListConnectionsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * The response for
   * [ListConnections][Management.ListConnections].
   * </pre>
   *
   * Protobuf type {@code google.cloud.apigeeconnect.v1.ListConnectionsResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.apigeeconnect.v1.ListConnectionsResponse)
      com.google.cloud.apigeeconnect.v1.ListConnectionsResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.apigeeconnect.v1.ConnectionProto
          .internal_static_google_cloud_apigeeconnect_v1_ListConnectionsResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.apigeeconnect.v1.ConnectionProto
          .internal_static_google_cloud_apigeeconnect_v1_ListConnectionsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.apigeeconnect.v1.ListConnectionsResponse.class,
              com.google.cloud.apigeeconnect.v1.ListConnectionsResponse.Builder.class);
    }

    // Construct using com.google.cloud.apigeeconnect.v1.ListConnectionsResponse.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (connectionsBuilder_ == null) {
        connections_ = java.util.Collections.emptyList();
      } else {
        connections_ = null;
        connectionsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.apigeeconnect.v1.ConnectionProto
          .internal_static_google_cloud_apigeeconnect_v1_ListConnectionsResponse_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.apigeeconnect.v1.ListConnectionsResponse getDefaultInstanceForType() {
      return com.google.cloud.apigeeconnect.v1.ListConnectionsResponse.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.apigeeconnect.v1.ListConnectionsResponse build() {
      com.google.cloud.apigeeconnect.v1.ListConnectionsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.apigeeconnect.v1.ListConnectionsResponse buildPartial() {
      com.google.cloud.apigeeconnect.v1.ListConnectionsResponse result =
          new com.google.cloud.apigeeconnect.v1.ListConnectionsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    private void buildPartialRepeatedFields(
        com.google.cloud.apigeeconnect.v1.ListConnectionsResponse result) {
      if (connectionsBuilder_ == null) {
        // Bit 0x00000001 marks the list as builder-owned/mutable; freeze it
        // into an unmodifiable list before handing it to the message.
        if (((bitField0_ & 0x00000001) != 0)) {
          connections_ = java.util.Collections.unmodifiableList(connections_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.connections_ = connections_;
      } else {
        result.connections_ = connectionsBuilder_.build();
      }
    }

    private void buildPartial0(com.google.cloud.apigeeconnect.v1.ListConnectionsResponse result) {
      int from_bitField0_ = bitField0_;
      // Bit 0x00000002 tracks whether nextPageToken was explicitly set.
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.apigeeconnect.v1.ListConnectionsResponse) {
        return mergeFrom((com.google.cloud.apigeeconnect.v1.ListConnectionsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.apigeeconnect.v1.ListConnectionsResponse other) {
      if (other == com.google.cloud.apigeeconnect.v1.ListConnectionsResponse.getDefaultInstance())
        return this;
      if (connectionsBuilder_ == null) {
        if (!other.connections_.isEmpty()) {
          if (connections_.isEmpty()) {
            // Share the other message's (immutable) list; clear the mutable bit.
            connections_ = other.connections_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureConnectionsIsMutable();
            connections_.addAll(other.connections_);
          }
          onChanged();
        }
      } else {
        if (!other.connections_.isEmpty()) {
          if (connectionsBuilder_.isEmpty()) {
            connectionsBuilder_.dispose();
            connectionsBuilder_ = null;
            connections_ = other.connections_;
            bitField0_ = (bitField0_ & ~0x00000001);
            connectionsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getConnectionsFieldBuilder()
                    : null;
          } else {
            connectionsBuilder_.addAllMessages(other.connections_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.apigeeconnect.v1.Connection m =
                    input.readMessage(
                        com.google.cloud.apigeeconnect.v1.Connection.parser(), extensionRegistry);
                if (connectionsBuilder_ == null) {
                  ensureConnectionsIsMutable();
                  connections_.add(m);
                } else {
                  connectionsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Bit 0: the connections_ list is builder-owned (mutable).
    // Bit 1: nextPageToken_ has been explicitly set.
    private int bitField0_;

    private java.util.List<com.google.cloud.apigeeconnect.v1.Connection> connections_ =
        java.util.Collections.emptyList();

    // Copy-on-write: replace a shared/immutable list with a mutable copy
    // before the first in-place modification.
    private void ensureConnectionsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        connections_ =
            new java.util.ArrayList<com.google.cloud.apigeeconnect.v1.Connection>(connections_);
        bitField0_ |= 0x00000001;
      }
    }

    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.apigeeconnect.v1.Connection,
            com.google.cloud.apigeeconnect.v1.Connection.Builder,
            com.google.cloud.apigeeconnect.v1.ConnectionOrBuilder>
        connectionsBuilder_;

    /**
     *
     *
     * <pre>
     * A list of clients.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeconnect.v1.Connection connections = 1;</code>
     */
    public java.util.List<com.google.cloud.apigeeconnect.v1.Connection> getConnectionsList() {
      if (connectionsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(connections_);
      } else {
        return connectionsBuilder_.getMessageList();
      }
    }

    /**
     *
     *
     * <pre>
     * A list of clients.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeconnect.v1.Connection connections = 1;</code>
     */
    public int getConnectionsCount() {
      if (connectionsBuilder_ == null) {
        return connections_.size();
      } else {
        return connectionsBuilder_.getCount();
      }
    }

    /**
     *
     *
     * <pre>
     * A list of clients.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeconnect.v1.Connection connections = 1;</code>
     */
    public com.google.cloud.apigeeconnect.v1.Connection getConnections(int index) {
      if (connectionsBuilder_ == null) {
        return connections_.get(index);
      } else {
        return connectionsBuilder_.getMessage(index);
      }
    }

    /**
     *
     *
     * <pre>
     * A list of clients.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeconnect.v1.Connection connections = 1;</code>
     */
    public Builder setConnections(int index, com.google.cloud.apigeeconnect.v1.Connection value) {
      if (connectionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureConnectionsIsMutable();
        connections_.set(index, value);
        onChanged();
      } else {
        connectionsBuilder_.setMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * A list of clients.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeconnect.v1.Connection connections = 1;</code>
     */
    public Builder setConnections(
        int index, com.google.cloud.apigeeconnect.v1.Connection.Builder builderForValue) {
      if (connectionsBuilder_ == null) {
        ensureConnectionsIsMutable();
        connections_.set(index, builderForValue.build());
        onChanged();
      } else {
        connectionsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * A list of clients.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeconnect.v1.Connection connections = 1;</code>
     */
    public Builder addConnections(com.google.cloud.apigeeconnect.v1.Connection value) {
      if (connectionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureConnectionsIsMutable();
        connections_.add(value);
        onChanged();
      } else {
        connectionsBuilder_.addMessage(value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * A list of clients.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeconnect.v1.Connection connections = 1;</code>
     */
    public Builder addConnections(int index, com.google.cloud.apigeeconnect.v1.Connection value) {
      if (connectionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureConnectionsIsMutable();
        connections_.add(index, value);
        onChanged();
      } else {
        connectionsBuilder_.addMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * A list of clients.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeconnect.v1.Connection connections = 1;</code>
     */
    public Builder addConnections(
        com.google.cloud.apigeeconnect.v1.Connection.Builder builderForValue) {
      if (connectionsBuilder_ == null) {
        ensureConnectionsIsMutable();
        connections_.add(builderForValue.build());
        onChanged();
      } else {
        connectionsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * A list of clients.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeconnect.v1.Connection connections = 1;</code>
     */
    public Builder addConnections(
        int index, com.google.cloud.apigeeconnect.v1.Connection.Builder builderForValue) {
      if (connectionsBuilder_ == null) {
        ensureConnectionsIsMutable();
        connections_.add(index, builderForValue.build());
        onChanged();
      } else {
        connectionsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * A list of clients.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeconnect.v1.Connection connections = 1;</code>
     */
    public Builder addAllConnections(
        java.lang.Iterable<? extends com.google.cloud.apigeeconnect.v1.Connection> values) {
      if (connectionsBuilder_ == null) {
        ensureConnectionsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, connections_);
        onChanged();
      } else {
        connectionsBuilder_.addAllMessages(values);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * A list of clients.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeconnect.v1.Connection connections = 1;</code>
     */
    public Builder clearConnections() {
      if (connectionsBuilder_ == null) {
        connections_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        connectionsBuilder_.clear();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * A list of clients.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeconnect.v1.Connection connections = 1;</code>
     */
    public Builder removeConnections(int index) {
      if (connectionsBuilder_ == null) {
        ensureConnectionsIsMutable();
        connections_.remove(index);
        onChanged();
      } else {
        connectionsBuilder_.remove(index);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * A list of clients.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeconnect.v1.Connection connections = 1;</code>
     */
    public com.google.cloud.apigeeconnect.v1.Connection.Builder getConnectionsBuilder(int index) {
      return getConnectionsFieldBuilder().getBuilder(index);
    }

    /**
     *
     *
     * <pre>
     * A list of clients.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeconnect.v1.Connection connections = 1;</code>
     */
    public com.google.cloud.apigeeconnect.v1.ConnectionOrBuilder getConnectionsOrBuilder(
        int index) {
      if (connectionsBuilder_ == null) {
        return connections_.get(index);
      } else {
        return connectionsBuilder_.getMessageOrBuilder(index);
      }
    }

    /**
     *
     *
     * <pre>
     * A list of clients.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeconnect.v1.Connection connections = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.apigeeconnect.v1.ConnectionOrBuilder>
        getConnectionsOrBuilderList() {
      if (connectionsBuilder_ != null) {
        return connectionsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(connections_);
      }
    }

    /**
     *
     *
     * <pre>
     * A list of clients.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeconnect.v1.Connection connections = 1;</code>
     */
    public com.google.cloud.apigeeconnect.v1.Connection.Builder addConnectionsBuilder() {
      return getConnectionsFieldBuilder()
          .addBuilder(com.google.cloud.apigeeconnect.v1.Connection.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * A list of clients.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeconnect.v1.Connection connections = 1;</code>
     */
    public com.google.cloud.apigeeconnect.v1.Connection.Builder addConnectionsBuilder(int index) {
      return getConnectionsFieldBuilder()
          .addBuilder(index, com.google.cloud.apigeeconnect.v1.Connection.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * A list of clients.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeconnect.v1.Connection connections = 1;</code>
     */
    public java.util.List<com.google.cloud.apigeeconnect.v1.Connection.Builder>
        getConnectionsBuilderList() {
      return getConnectionsFieldBuilder().getBuilderList();
    }

    // Lazily created nested-builder support for the repeated field; once
    // created, connections_ is handed off to the field builder and nulled.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.apigeeconnect.v1.Connection,
            com.google.cloud.apigeeconnect.v1.Connection.Builder,
            com.google.cloud.apigeeconnect.v1.ConnectionOrBuilder>
        getConnectionsFieldBuilder() {
      if (connectionsBuilder_ == null) {
        connectionsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.apigeeconnect.v1.Connection,
                com.google.cloud.apigeeconnect.v1.Connection.Builder,
                com.google.cloud.apigeeconnect.v1.ConnectionOrBuilder>(
                connections_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        connections_ = null;
      }
      return connectionsBuilder_;
    }

    private java.lang.Object nextPageToken_ = "";

    /**
     *
     *
     * <pre>
     * A token that can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * A token that can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * A token that can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A token that can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A token that can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.apigeeconnect.v1.ListConnectionsResponse)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.apigeeconnect.v1.ListConnectionsResponse)
  private static final com.google.cloud.apigeeconnect.v1.ListConnectionsResponse DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.apigeeconnect.v1.ListConnectionsResponse();
  }

  public static com.google.cloud.apigeeconnect.v1.ListConnectionsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser that delegates to Builder.mergeFrom and converts any I/O failure
  // into InvalidProtocolBufferException, attaching the partially-built message.
  private static final com.google.protobuf.Parser<ListConnectionsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListConnectionsResponse>() {
        @java.lang.Override
        public ListConnectionsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListConnectionsResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListConnectionsResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.apigeeconnect.v1.ListConnectionsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
apache/ctakes
34,387
ctakes-ytex/src/main/java/org/apache/ctakes/ytex/kernel/metric/ConceptSimilarityServiceImpl.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.ctakes.ytex.kernel.metric; import com.google.common.collect.ImmutableMap; import net.sf.ehcache.Cache; import net.sf.ehcache.CacheManager; import net.sf.ehcache.Element; import org.apache.commons.cli.*; import org.apache.ctakes.ytex.kernel.ImputedFeatureEvaluator; import org.apache.ctakes.ytex.kernel.InfoContentEvaluator; import org.apache.ctakes.ytex.kernel.IntrinsicInfoContentEvaluator; import org.apache.ctakes.ytex.kernel.SimSvcContextHolder; import org.apache.ctakes.ytex.kernel.dao.ClassifierEvaluationDao; import org.apache.ctakes.ytex.kernel.dao.ConceptDao; import org.apache.ctakes.ytex.kernel.model.ConcRel; import org.apache.ctakes.ytex.kernel.model.ConceptGraph; import org.apache.ctakes.ytex.kernel.model.FeatureRank; import org.apache.ctakes.ytex.kernel.pagerank.PageRankService; import org.slf4j.LoggerFactory; import org.slf4j.Logger; import org.springframework.transaction.PlatformTransactionManager; import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.support.TransactionCallback; import org.springframework.transaction.support.TransactionTemplate; import java.io.*; import java.util.*; /** * compute concept similarity * * 
@author vijay * */ public class ConceptSimilarityServiceImpl implements ConceptSimilarityService { private static final Logger LOGGER = LoggerFactory.getLogger( "ConceptSimilarityServiceImpl" ); private static String formatPaths(List<LCSPath> lcsPaths) { StringBuilder b = new StringBuilder(); Iterator<LCSPath> lcsPathIter = lcsPaths.iterator(); while (lcsPathIter.hasNext()) { LCSPath lcsPath = lcsPathIter.next(); String lcs = lcsPath.getLcs(); b.append(lcs); b.append("="); b.append(lcsPath.toString()); if (lcsPathIter.hasNext()) b.append("|"); } return b.toString(); } @SuppressWarnings("static-access") public static void main(String args[]) throws IOException { Options options = new Options(); options.addOption(OptionBuilder .withArgName("concepts") .hasArg() .withDescription( "concept pairs or a file containing concept pairs. To specify pairs on command line, separate concepts by comma, concept pairs by semicolon. For file, separate concepts by comma or tab, each concept pair on a new line.") .isRequired(true).create("concepts")); options.addOption(OptionBuilder .withArgName("metrics") .hasArg() .withDescription( "comma-separated list of metrics. Valid metrics: " + Arrays.asList(SimilarityMetricEnum.values())) .isRequired(true).create("metrics")); options.addOption(OptionBuilder .withArgName("out") .hasArg() .withDescription( "file to write oputput to. 
if not specified, output sent to stdout.") .create("out")); options.addOption(OptionBuilder.withArgName("lcs") .withDescription("output lcs and path for each concept pair") .create("lcs")); try { CommandLineParser parser = new GnuParser(); CommandLine line = parser.parse(options, args); String concepts = line.getOptionValue("concepts"); String metrics = line.getOptionValue("metrics"); String out = line.getOptionValue("out"); boolean lcs = line.hasOption("lcs"); PrintStream os = null; try { if (out != null) { os = new PrintStream(new BufferedOutputStream( new FileOutputStream(out))); } else { os = System.out; } List<ConceptPair> conceptPairs = parseConcepts(concepts); List<SimilarityMetricEnum> metricList = parseMetrics(metrics); ConceptSimilarityService simSvc = SimSvcContextHolder .getApplicationContext().getBean( ConceptSimilarityService.class); List<SimilarityInfo> simInfos = lcs ? new ArrayList<SimilarityInfo>( conceptPairs.size()) : null; List<ConceptPairSimilarity> conceptSimMap = simSvc.similarity( conceptPairs, metricList, null, lcs); printSimilarities(conceptPairs, conceptSimMap, metricList, simInfos, lcs, os); // try { // Thread.sleep(60*1000); // } catch (InterruptedException e) { // e.printStackTrace(); // } } finally { if (out != null) { try { os.close(); } catch (Exception e) { } } } } catch (ParseException pe) { HelpFormatter formatter = new HelpFormatter(); formatter.printHelp( "java " + ConceptSimilarityServiceImpl.class.getName() + " get concept similiarity", options); } } private static List<ConceptPair> parseConcepts(String concepts) throws IOException { BufferedReader r = null; try { List<ConceptPair> conceptPairs = new ArrayList<ConceptPair>(); File f = new File(concepts); if (f.exists()) { r = new BufferedReader(new FileReader(f)); } else { r = new BufferedReader(new StringReader(concepts)); } String line = null; while ((line = r.readLine()) != null) { // for command line, split pairs by semicolon String lines[] = line.split(";"); for (String 
subline : lines) { String pair[] = subline.split(",|\\t"); if (pair.length != 2) { System.err.println("cannot parse concept pair: " + subline); } else { conceptPairs.add(new ConceptPair(pair[0], pair[1])); } } } return conceptPairs; } finally { if (r != null) r.close(); } } private static List<SimilarityMetricEnum> parseMetrics(String metrics) { String ms[] = metrics.split(","); List<SimilarityMetricEnum> metricSet = new ArrayList<SimilarityMetricEnum>(); for (String metric : ms) { SimilarityMetricEnum m = SimilarityMetricEnum.valueOf(metric); if (m == null) System.err.println("invalid metric: " + ms); else metricSet.add(m); } return metricSet; } private static void printSimilarities(List<ConceptPair> conceptPairs, List<ConceptPairSimilarity> conceptSimList, List<SimilarityMetricEnum> metricList, List<SimilarityInfo> simInfos, boolean lcs, PrintStream os) { // print header os.print("Concept 1\tConcept 2"); for (SimilarityMetricEnum metric : metricList) { os.print("\t"); os.print(metric); } if (lcs) { os.print("\tlcs(s)\tcorpus lcs\tintrinsic lcs\tpaths"); } os.println(); // print content for (ConceptPairSimilarity csim : conceptSimList) { ConceptPair p = csim.getConceptPair(); os.print(p.getConcept1()); os.print("\t"); os.print(p.getConcept2()); for (Double sim : csim.getSimilarities()) { os.print("\t"); if (sim != null) os.print(String.format("%6f", sim)); else os.print(0d); } if (lcs) { SimilarityInfo simInfo = csim.getSimilarityInfo(); os.print("\t"); Iterator<String> lcsIter = simInfo.getLcses().iterator(); while (lcsIter.hasNext()) { os.print(lcsIter.next()); if (lcsIter.hasNext()) os.print('|'); } os.print("\t"); os.print(simInfo.getCorpusLcs() == null ? "" : simInfo .getCorpusLcs()); os.print("\t"); os.print(simInfo.getIntrinsicLcs() == null ? 
"" : simInfo .getIntrinsicLcs()); os.print("\t"); os.print(formatPaths(simInfo.getLcsPaths())); } os.println(); } } private CacheManager cacheManager; private ConceptGraph cg = null; private ClassifierEvaluationDao classifierEvaluationDao; private ConceptDao conceptDao; private String conceptGraphName; private String conceptSetName; // /** // * information concept cache // */ // private Map<String, Double> corpusICMap = null; private String corpusName; private Map<String, BitSet> cuiTuiMap; // private Map<String, ConceptInfo> conceptInfoMap = null; // private ConceptInfo[] conceptInfoCache; /** * cache to hold lcs's */ private Cache lcsCache; private String lcsImputedType = ImputedFeatureEvaluator.MeasureType.INFOGAIN .getName(); private PageRankService pageRankService; private boolean preload = true; private Map<String, Double> corpusICMap; private Map<SimilarityMetricEnum, SimilarityMetric> similarityMetricMap = null; private PlatformTransactionManager transactionManager; private List<String> tuiList; private void addCuiTuiToMap(Map<String, Set<String>> cuiTuiMap, Map<String, String> tuiMap, String cui, String tui) { // get 'the' tui string if (tuiMap.containsKey(tui)) tui = tuiMap.get(tui); else tuiMap.put(tui, tui); Set<String> tuis = cuiTuiMap.get(cui); if (tuis == null) { tuis = new HashSet<String>(); cuiTuiMap.put(cui, tuis); } tuis.add(tui); } @Override public Object[] getBestLCS(Set<String> lcses, boolean intrinsicIC, Map<String, Double> conceptFilter) { Map<String, Double> lcsICMap = new HashMap<String, Double>(lcses.size()); // if (isPreload()) { // look in conceptInfoMap for info content for (String lcs : lcses) { lcsICMap.put(lcs, getIC(lcs, intrinsicIC)); // } // } else { // // load info content on demand // Map<String, FeatureRank> frMap = getICOnDemand(lcses, // intrinsicIC); // for (Map.Entry<String, FeatureRank> frMapEntry : // frMap.entrySet()) { // lcsICMap.put(frMapEntry.getKey(), frMapEntry.getValue() // .getEvaluation()); // } } if 
(conceptFilter != null) { double currentBest = -1; Set<String> bestLcses = new HashSet<String>(); for (String lcs : lcses) { if (conceptFilter.containsKey(lcs)) { double lcsEval = conceptFilter.get(lcs); if (currentBest == -1 || lcsEval > currentBest) { bestLcses.clear(); bestLcses.add(lcs); currentBest = lcsEval; } else if (currentBest == lcsEval) { bestLcses.add(lcs); } } } if (currentBest < 0) currentBest = 0d; if (bestLcses.size() > 0) { return this.getBestLCS(bestLcses, lcsICMap); } else { // no lcses made the cut return null; } } else { // unfiltered - get the lowest ic return this.getBestLCS(lcses, lcsICMap); } } public Object[] getBestLCS(Set<String> lcses, Map<String, Double> icMap) { double ic = -1; String bestLCS = null; for (String lcs : lcses) { Double ictmp = icMap.get(lcs); if (ictmp != null && ic < ictmp.doubleValue()) { ic = ictmp; bestLCS = lcs; } } if (ic < 0) ic = 0d; return new Object[] { bestLCS, ic }; } // /** // * return lin measure. optionally filter lin measure so that only concepts // * that have an lcs that is relevant to the classification task have a // * non-zero lin measure. // * // * relevant concepts are those whose evaluation wrt the label exceeds a // * threshold. // * // * @param concept1 // * @param concept2 // * @param label // * if not null, then filter lcses. // * @param lcsMinEvaluation // * if gt; 0, then filter lcses. this is the threshold. // * @return 0 - no lcs, or no lcs that meets the threshold. 
// */ // @Override // public double filteredLin(String concept1, String concept2, // Map<String, Double> conceptFilter) { // double ic1 = getIC(concept1); // double ic2 = getIC(concept2); // // lin not defined if one of the concepts doesn't exist in the corpus // if (ic1 == 0 || ic2 == 0) // return 0; // double denom = getIC(concept1) + getIC(concept2); // if (denom != 0) { // ConcRel cr1 = cg.getConceptMap().get(concept1); // ConcRel cr2 = cg.getConceptMap().get(concept2); // if (cr1 != null && cr2 != null) { // Set<String> lcses = new HashSet<String>(); // int dist = getLCSFromCache(cr1, cr2, lcses); // if (dist > 0) { // double ic = getBestIC(lcses, conceptFilter); // return 2 * ic / denom; // } // } // } // return 0; // } // /** // * get the information content for the concept with the highest evaluation // * greater than a specified threshold. // * // * If threshold 0, get the lowest IC of all the lcs's. // * // * @param lcses // * the least common subsumers of a pair of concepts // * @param label // * label against which feature was evaluated // * @param lcsMinEvaluation // * threshold that the feature has to exceed. 0 for no filtering. // * @return 0 if no lcs that makes the cut. else find the lcs(es) with the // * maximal evaluation, and return getIC on these lcses. 
// * // * @see #getIC(Iterable) // */ // private double getBestIC(Set<String> lcses, // Map<String, Double> conceptFilter) { // if (conceptFilter != null) { // double currentBest = -1; // Set<String> bestLcses = new HashSet<String>(); // for (String lcs : lcses) { // if (conceptFilter.containsKey(lcs)) { // double lcsEval = conceptFilter.get(lcs); // if (currentBest == -1 || lcsEval > currentBest) { // bestLcses.clear(); // bestLcses.add(lcs); // currentBest = lcsEval; // } else if (currentBest == lcsEval) { // bestLcses.add(lcs); // } // } // } // if (bestLcses.size() > 0) { // return this.getIC(bestLcses); // } // } else { // // unfiltered - get the lowest ic // return this.getIC(lcses); // } // return 0; // } // private ConceptInfo getPreloadedConceptInfo(String conceptId) { // ConcRel cr = cg.getConceptMap().get(conceptId); // if (cr != null) { // return this.conceptInfoCache[cr.getNodeIndex()]; // } // return null; // } public CacheManager getCacheManager() { return cacheManager; } public ClassifierEvaluationDao getClassifierEvaluationDao() { return classifierEvaluationDao; } public ConceptDao getConceptDao() { return conceptDao; } // private String createKey(String c1, String c2) { // if (c1.compareTo(c2) < 0) { // return new StringBuilder(c1).append("-").append(c2).toString(); // } else { // return new StringBuilder(c2).append("-").append(c1).toString(); // } // } @Override public ConceptGraph getConceptGraph() { return cg; } public String getConceptGraphName() { return conceptGraphName; } public String getConceptSetName() { return conceptSetName; } public String getCorpusName() { return corpusName; } @Override public Map<String, BitSet> getCuiTuiMap() { return cuiTuiMap; } @Override public int getDepth(String concept) { // if (isPreload()) { // // preloaded all concept info - depth should be there // ConceptInfo ci = this.getPreloadedConceptInfo(concept); // if (ci != null) // return (int) ci.getDepth(); // } else { // // get the feature ranks for the 
intrinsic infocontent - // // rank = depth // Map<String, FeatureRank> frMap = getICOnDemand(new HashSet<String>( // Arrays.asList(concept)), true); // if (frMap.containsKey(concept)) // return frMap.get(concept).getRank(); // } ConcRel cr = this.cg.getConceptMap().get(concept); if (cr != null) return cr.getDepth(); return 0; } @Override public double getIC(String concept, boolean intrinsicICMap) { double ic = 0d; if (intrinsicICMap) { ConcRel cr = this.cg.getConceptMap().get(concept); if (cr != null) ic = cr.getIntrinsicInfoContent(); } else { Double icC = null; if (isPreload()) { // we preloaded all ic - just look in the cache icC = this.corpusICMap.get(concept); } else { // we need to load the ic from the database on demand Map<String, FeatureRank> frMap = getICOnDemand( new HashSet<String>(Arrays.asList(concept)), false); if (frMap.containsKey(concept)) return frMap.get(concept).getEvaluation(); } if (icC != null) ic = icC; } return ic; // if (isPreload()) { // ConceptInfo ci = this.getPreloadedConceptInfo(concept); // if (ci != null) // return intrinsicICMap ? ci.getIntrinsicIC() : ci.getCorpusIC(); // } else { // Map<String, FeatureRank> frMap = getICOnDemand(new HashSet<String>( // Arrays.asList(concept)), intrinsicICMap); // if (frMap.containsKey(concept)) // return frMap.get(concept).getEvaluation(); // } // return 0d; } private Map<String, FeatureRank> getICOnDemand(Set<String> lcses, boolean intrinsicIC) { if (lcses == null || lcses.isEmpty()) return new HashMap<String, FeatureRank>(0); Map<String, FeatureRank> lcsICMap; lcsICMap = this.classifierEvaluationDao .getFeatureRanks( lcses, intrinsicIC ? null : this.corpusName, intrinsicIC ? null : this.conceptSetName, null, intrinsicIC ? IntrinsicInfoContentEvaluator.INTRINSIC_INFOCONTENT : InfoContentEvaluator.INFOCONTENT, null, 0d, this.getConceptGraphName()); return lcsICMap; } // /** // * get the concept with the lowest Information Content of all the LCSs. // * Functionality copied from umls interface. 
    // (legacy commented-out getIC(Iterable)/getIC(String) implementations
    // elided; see version control history)

    /**
     * Find the least common subsumer(s) of two concepts and the LCS
     * distance. When {@code lcsPaths} is null the (cacheable) cache-backed
     * lookup is used; otherwise the paths are recomputed so they can be
     * returned to the caller (paths are not cached).
     *
     * @param concept1 first concept id
     * @param concept2 second concept id
     * @param lcses    out-param, cleared then filled with the lcs concept ids
     * @param lcsPaths out-param for per-lcs paths, or null to skip paths
     * @return lcs distance; 0 when either concept is not in the graph
     */
    public int getLCS(String concept1, String concept2, Set<String> lcses,
            List<LCSPath> lcsPaths) {
        int lcsDist = 0;
        ConcRel cr1 = getConceptGraph().getConceptMap().get(concept1);
        ConcRel cr2 = getConceptGraph().getConceptMap().get(concept2);
        if (cr1 != null && cr2 != null) {
            lcses.clear();
            if (lcsPaths == null) {
                // no need to get paths which we don't cache - look in the cache
                lcsDist = getLCSFromCache(cr1, cr2, lcses);
            } else {
                lcsPaths.clear();
                // need to get paths - compute the lcses and their paths
                lcsDist = lcs(concept1, concept2, lcsPaths);
                for (LCSPath lcsPath : lcsPaths) {
                    lcses.add(lcsPath.getLcs());
                }
            }
        } else {
            if (LOGGER.isDebugEnabled()) {
                if (cr1 == null)
                    LOGGER.debug("could not find concept:" + concept1);
                if (cr2 == null)
                    LOGGER.debug("could not find concept:" + concept2);
            }
        }
        return lcsDist;
    }

    public Cache getLcsCache() {
        return lcsCache;
    }

    /**
     * Cache-backed LCS lookup. The cache key is the concept graph name plus
     * the two concept ids in lexicographic order (so the pair is symmetric).
     * A cached null payload encodes "no LCS" (distance -1); otherwise the
     * payload is {@code [distance (Integer), lcs ids (Set<String>)]}.
     * <p>
     * NOTE(review): on a cache miss the caller-supplied {@code lcses} set
     * itself is stored as the cached payload (val[1] = lcses) — if a caller
     * mutates that set afterwards the cached entry changes too; verify
     * callers always pass a fresh set.
     *
     * @param lcses out-param, filled with the lcs concept ids
     * @return lcs distance, or -1 when there is no LCS
     */
    @SuppressWarnings("unchecked")
    private int getLCSFromCache(ConcRel cr1, ConcRel cr2, Set<String> lcses) {
        StringBuilder cacheKeyBuilder = new StringBuilder(this.conceptGraphName);
        cacheKeyBuilder
                .append(cr1.getConceptID().compareTo(cr2.getConceptID()) < 0 ? cr1
                        .getConceptID() : cr2.getConceptID());
        cacheKeyBuilder
                .append(cr1.getConceptID().compareTo(cr2.getConceptID()) >= 0 ? cr2
                        .getConceptID() : cr1.getConceptID());
        String cacheKey = cacheKeyBuilder.toString();
        Element e = this.lcsCache != null ? this.lcsCache.get(cacheKey) : null;
        if (e != null) {
            // hit the cache - unpack the lcs
            if (e.getObjectValue() != null) {
                Object[] val = (Object[]) e.getObjectValue();
                lcses.addAll((Set<String>) val[1]);
                return (Integer) val[0];
            } else {
                // cached "no LCS" marker
                return -1;
            }
        } else {
            // missed the cache - compute, then save the lcs
            Object[] val = null;
            Set<ConcRel> lcsCRSet = new HashSet<ConcRel>(2);
            int dist = ConcRel.getLeastCommonConcept(cr1, cr2, lcsCRSet, null);
            if (dist >= 0) {
                val = new Object[2];
                val[0] = dist;
                for (ConcRel cr : lcsCRSet) {
                    lcses.add(cr.getConceptID());
                }
                val[1] = lcses;
            }
            if (this.lcsCache != null) {
                e = new Element(cacheKey, val);
                this.lcsCache.put(e);
            }
            return dist;
        }
    }

    public String getLcsImputedType() {
        return lcsImputedType;
    }

    public PageRankService getPageRankService() {
        return pageRankService;
    }

    public Map<SimilarityMetricEnum, SimilarityMetric> getSimilarityMetricMap() {
        return similarityMetricMap;
    }

    public PlatformTransactionManager getTransactionManager() {
        return transactionManager;
    }

    @Override
    public List<String> getTuiList() {
        return this.tuiList;
    }

    /**
     * Spring init hook: loads the concept graph, wires up the similarity
     * metrics, and — when preload is enabled — loads info content and the
     * cui-tui map inside a fresh transaction. Database failures during
     * preload are logged and swallowed deliberately (the cui-tui data only
     * exists when UMLS is installed).
     */
    public void init() {
        LOGGER.info("begin initialization for concept graph: "
                + conceptGraphName);
        cg = conceptDao.getConceptGraph(conceptGraphName);
        if (cg == null) {
            LOGGER.warn("concept graph null, name: " + conceptGraphName);
        } else {
            initSimilarityMetricMap();
            if (isPreload()) {
                try {
                    TransactionTemplate t = new TransactionTemplate(
                            this.transactionManager);
                    t.setPropagationBehavior(TransactionTemplate.PROPAGATION_REQUIRES_NEW);
                    t.execute(new TransactionCallback<Object>() {
                        @Override
                        public Object doInTransaction(TransactionStatus arg0) {
                            initInfoContent();
                            initCuiTuiMapFromCorpus();
                            return null;
                        }
                    });
                } catch (Exception e) {
                    LOGGER.info("could not initialize cui-tui map: "
                            + e.getMessage()
                            + ". This is expected if you do not have umls installed in your db.");
                }
            }
        }
        LOGGER.info("end initialization for concept graph: "
                + conceptGraphName);
    }

    /**
     * load cui-tui for the specified corpus from the MRSTY table.
     * Builds the immutable cui -&gt; tui-bitset map and the parallel
     * {@code tuiList} that maps bitset indices back to tuis.
     */
    public void initCuiTuiMapFromCorpus() {
        // don't duplicate tui strings to save memory
        SortedMap<String, String> tuiMap = new TreeMap<String, String>();
        Map<String, Set<String>> tmpTuiCuiMap = new HashMap<String, Set<String>>();
        List<Object[]> listCuiTui = this.classifierEvaluationDao
                .getCorpusCuiTuis(this.getCorpusName(),
                        this.getConceptGraphName(), this.getConceptSetName());
        for (Object[] cuiTui : listCuiTui) {
            String cui = (String) cuiTui[0];
            String tui = (String) cuiTui[1];
            addCuiTuiToMap(tmpTuiCuiMap, tuiMap, cui, tui);
        }
        // map of tui - bitset index
        SortedMap<String, Integer> mapTuiIndex = new TreeMap<String, Integer>();
        // list of tuis corresponding to bitset indices
        List<String> tmpTuiList = new ArrayList<String>(tuiMap.size());
        int index = 0;
        for (String tui : tuiMap.keySet()) {
            mapTuiIndex.put(tui, index++);
            tmpTuiList.add(tui);
        }
        this.tuiList = Collections.unmodifiableList(tmpTuiList);
        // convert list of cuis into bitsets
        ImmutableMap.Builder<String, BitSet> cuiTuiBitsetMapBuilder = new ImmutableMap.Builder<String, BitSet>();
        for (Map.Entry<String, Set<String>> cuiTuiMapEntry : tmpTuiCuiMap
                .entrySet()) {
            cuiTuiBitsetMapBuilder.put(cuiTuiMapEntry.getKey(),
                    tuiListToBitset(cuiTuiMapEntry.getValue(), mapTuiIndex));
        }
        this.cuiTuiMap = cuiTuiBitsetMapBuilder.build();
    }

    /**
     * initialize information content caches TODO replace strings with concept
     * ids from conceptGraph to save memory.
     * Loads corpus info content from the database into an immutable map
     * keyed by the graph's interned concept-id strings.
     * <p>
     * NOTE(review): the null check below only logs — a null return from the
     * DAO would still NPE at corpusICMap.entrySet(); confirm the DAO never
     * returns null (init() would swallow the exception either way).
     */
    private void initInfoContent() {
        // (large block of commented-out intrinsic-IC preloading code elided;
        // intrinsic IC now comes from the concept graph itself)
        // fill corpusIC
        LOGGER.info("loading corpus infocontent for corpusName=" + corpusName
                + ", conceptGraphName=" + conceptGraphName
                + ", conceptSetName=" + conceptSetName);
        Map<String, Double> corpusICMap = classifierEvaluationDao
                .getInfoContent(corpusName, conceptGraphName,
                        this.conceptSetName);
        if (corpusICMap == null || corpusICMap.isEmpty()) {
            LOGGER.warn("IC not found");
        }
        ImmutableMap.Builder<String, Double> mb = new ImmutableMap.Builder<String, Double>();
        for (Map.Entry<String, Double> corpusICEntry : corpusICMap.entrySet()) {
            ConcRel cr = cg.getConceptMap().get(corpusICEntry.getKey());
            if (cr != null) {
                // reuse the graph's concept-id string to save memory
                mb.put(cr.getConceptID(), corpusICEntry.getValue());
            }
        }
        this.corpusICMap = mb.build();
    }

    /**
     * initialize the metrics. Graph-based metrics require a positive max
     * depth; otherwise only the PageRank metric is registered.
     */
    private void initSimilarityMetricMap() {
        LOGGER.info("initializing similarity measures");
        double maxIC = this.cg.getIntrinsicICMax();
        int maxDepth = this.cg.getDepthMax();
        this.similarityMetricMap = new HashMap<SimilarityMetricEnum, SimilarityMetric>(
                SimilarityMetricEnum.values().length);
        if (maxDepth > 0) {
            this.similarityMetricMap.put(SimilarityMetricEnum.LCH,
                    new LCHMetric(this, maxDepth));
            this.similarityMetricMap.put(SimilarityMetricEnum.LIN,
                    new LinMetric(this, false));
            this.similarityMetricMap.put(SimilarityMetricEnum.INTRINSIC_LIN,
                    new LinMetric(this, true));
            this.similarityMetricMap.put(SimilarityMetricEnum.INTRINSIC_LCH,
                    new IntrinsicLCHMetric(this, maxIC));
            this.similarityMetricMap.put(SimilarityMetricEnum.PATH,
                    new PathMetric(this));
            this.similarityMetricMap.put(SimilarityMetricEnum.INTRINSIC_PATH,
                    new IntrinsicPathMetric(this, maxIC));
            this.similarityMetricMap.put(SimilarityMetricEnum.RADA,
                    new RadaMetric(this, maxDepth));
            this.similarityMetricMap.put(SimilarityMetricEnum.INTRINSIC_RADA,
                    new IntrinsicRadaMetric(this, maxIC));
            this.similarityMetricMap.put(SimilarityMetricEnum.SOKAL,
                    new SokalSneathMetric(this));
            this.similarityMetricMap.put(SimilarityMetricEnum.JACCARD,
                    new JaccardMetric(this));
            this.similarityMetricMap.put(SimilarityMetricEnum.WUPALMER,
                    new WuPalmerMetric(this));
        } else {
            this.similarityMetricMap.put(SimilarityMetricEnum.PAGERANK,
                    new PageRankMetric(this, this.getPageRankService()));
        }
    }

    public boolean isPreload() {
        return preload;
    }

    /**
     * Compute the LCS(es) of two concepts together with the full paths.
     *
     * @param lcsPaths out-param, appended with one LCSPath per lcs
     * @return lcs distance, or -1 when either concept is unknown
     */
    public int lcs(String concept1, String concept2, List<LCSPath> lcsPaths) {
        ConcRel cr1 = cg.getConceptMap().get(concept1);
        ConcRel cr2 = cg.getConceptMap().get(concept2);
        int dist = -1;
        if (cr1 != null && cr2 != null) {
            Set<ConcRel> crlcses = new HashSet<ConcRel>();
            Map<ConcRel, LCSPath> crpaths = new HashMap<ConcRel, LCSPath>();
            dist = ConcRel.getLeastCommonConcept(cr1, cr2, crlcses, crpaths);
            lcsPaths.addAll(crpaths.values());
        }
        return dist;
    }

    // (commented-out legacy lch(String,String) implementation elided)

    /**
     * For the given label and cutoff, get the corresponding concepts whose
     * propagated ig meets the threshold. Used by lin kernel to find concepts
     * that actually have a non-trivial similarity
     *
     * @param label
     *            label
     * @param rankCutoff
     *            cutoff
     * @param conceptFilter
     *            set to fill with concepts
     * @return double minimum evaluation
     */
    @Override
    public double loadConceptFilter(String label, int rankCutoff,
            Map<String, Double> conceptFilter) {
        List<FeatureRank> imputedConcepts = this.classifierEvaluationDao
                .getImputedFeaturesByPropagatedCutoff(corpusName,
                        conceptSetName, label, lcsImputedType
                                + ImputedFeatureEvaluator.SUFFIX_IMPUTED,
                        conceptGraphName, lcsImputedType
                                + ImputedFeatureEvaluator.SUFFIX_PROP,
                        rankCutoff);
        double minEval = 1d;
        for (FeatureRank r : imputedConcepts) {
            conceptFilter.put(r.getFeatureName(), r.getEvaluation());
            if (minEval >= r.getEvaluation())
                minEval = r.getEvaluation();
        }
        return minEval;
    }

    public void setCacheManager(CacheManager cacheManager) {
        this.cacheManager = cacheManager;
    }

    public void setClassifierEvaluationDao(
            ClassifierEvaluationDao classifierEvaluationDao) {
        this.classifierEvaluationDao = classifierEvaluationDao;
    }

    public void setConceptDao(ConceptDao conceptDao) {
        this.conceptDao = conceptDao;
    }

    public void setConceptGraphName(String conceptGraphName) {
        this.conceptGraphName = conceptGraphName;
    }

    public void setConceptSetName(String conceptSetName) {
        this.conceptSetName = conceptSetName;
    }

    public void setCorpusName(String corpusName) {
        this.corpusName = corpusName;
    }

    public void setLcsCache(Cache lcsCache) {
        this.lcsCache = lcsCache;
    }

    public void setLcsImputedType(String lcsImputedType) {
        this.lcsImputedType = lcsImputedType;
    }

    // (commented-out legacy loadConceptFilter/addSubtree implementations
    // elided)

    public void setPageRankService(PageRankService pageRankService) {
        this.pageRankService = pageRankService;
    }

    public void setPreload(boolean preload) {
        this.preload = preload;
    }

    public void setSimilarityMetricMap(
            Map<SimilarityMetricEnum, SimilarityMetric> similarityMetricMap) {
        this.similarityMetricMap = similarityMetricMap;
    }

    public void setTransactionManager(
            PlatformTransactionManager transactionManager) {
        this.transactionManager = transactionManager;
    }

    /**
     * Batch form: computes one {@link ConceptPairSimilarity} per pair by
     * delegating to the single-pair overload.
     */
    @Override
    public List<ConceptPairSimilarity> similarity(
            List<ConceptPair> conceptPairs, List<SimilarityMetricEnum> metrics,
            Map<String, Double> conceptFilter, boolean lcs) {
        List<ConceptPairSimilarity> conceptSimMap = new ArrayList<ConceptPairSimilarity>(
                conceptPairs.size());
        for (ConceptPair conceptPair : conceptPairs) {
            conceptSimMap.add(similarity(metrics, conceptPair.getConcept1(),
                    conceptPair.getConcept2(), conceptFilter, lcs));
        }
        return conceptSimMap;
    }

    /**
     * Compute the requested metrics for one concept pair. When the concept
     * graph failed to load ({@code cg == null}) the similarity list is left
     * empty.
     *
     * @param lcs whether to also collect LCS paths into the SimilarityInfo
     */
    @Override
    public ConceptPairSimilarity similarity(List<SimilarityMetricEnum> metrics,
            String concept1, String concept2,
            Map<String, Double> conceptFilter, boolean lcs) {
        // allocate simInfo if this isn't provided
        SimilarityInfo simInfo = new SimilarityInfo();
        if (lcs)
            simInfo.setLcsPaths(new ArrayList<LCSPath>(1));
        // allocate result map
        List<Double> similarities = new ArrayList<Double>(metrics.size());
        if (cg != null) {
            // iterate over metrics, compute, stuff in map
            for (SimilarityMetricEnum metric : metrics) {
                double sim = this.similarityMetricMap.get(metric).similarity(
                        concept1, concept2, conceptFilter, simInfo);
                similarities.add(sim);
            }
        }
        ConceptPairSimilarity csim = new ConceptPairSimilarity();
        csim.setConceptPair(new ConceptPair(concept1, concept2));
        csim.setSimilarities(similarities);
        csim.setSimilarityInfo(simInfo);
        return csim;
    }

    /**
     * convert the list of tuis into a bitset
     *
     * @param tuis        tuis to encode
     * @param mapTuiIndex tui -&gt; bitset index (must contain every tui)
     * @return bitset with one bit set per tui
     */
    private BitSet tuiListToBitset(Set<String> tuis,
            SortedMap<String, Integer> mapTuiIndex) {
        BitSet bs = new BitSet(mapTuiIndex.size());
        for (String tui : tuis) {
            bs.set(mapTuiIndex.get(tui));
        }
        return bs;
    }
}
googleapis/google-cloud-java
36,576
java-bigquerymigration/proto-google-cloud-bigquerymigration-v2/src/main/java/com/google/cloud/bigquery/migration/v2/NameMappingValue.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/bigquery/migration/v2/translation_config.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.bigquery.migration.v2; /** * * * <pre> * The potential components of a full name mapping that will be mapped * during translation in the target data warehouse. * </pre> * * Protobuf type {@code google.cloud.bigquery.migration.v2.NameMappingValue} */ public final class NameMappingValue extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.bigquery.migration.v2.NameMappingValue) NameMappingValueOrBuilder { private static final long serialVersionUID = 0L; // Use NameMappingValue.newBuilder() to construct. 
private NameMappingValue(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private NameMappingValue() { database_ = ""; schema_ = ""; relation_ = ""; attribute_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new NameMappingValue(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.bigquery.migration.v2.TranslationConfigProto .internal_static_google_cloud_bigquery_migration_v2_NameMappingValue_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.bigquery.migration.v2.TranslationConfigProto .internal_static_google_cloud_bigquery_migration_v2_NameMappingValue_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.bigquery.migration.v2.NameMappingValue.class, com.google.cloud.bigquery.migration.v2.NameMappingValue.Builder.class); } public static final int DATABASE_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object database_ = ""; /** * * * <pre> * The database name (BigQuery project ID equivalent in the target data * warehouse). * </pre> * * <code>string database = 1;</code> * * @return The database. */ @java.lang.Override public java.lang.String getDatabase() { java.lang.Object ref = database_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); database_ = s; return s; } } /** * * * <pre> * The database name (BigQuery project ID equivalent in the target data * warehouse). * </pre> * * <code>string database = 1;</code> * * @return The bytes for database. 
*/ @java.lang.Override public com.google.protobuf.ByteString getDatabaseBytes() { java.lang.Object ref = database_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); database_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int SCHEMA_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object schema_ = ""; /** * * * <pre> * The schema name (BigQuery dataset equivalent in the target data warehouse). * </pre> * * <code>string schema = 2;</code> * * @return The schema. */ @java.lang.Override public java.lang.String getSchema() { java.lang.Object ref = schema_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); schema_ = s; return s; } } /** * * * <pre> * The schema name (BigQuery dataset equivalent in the target data warehouse). * </pre> * * <code>string schema = 2;</code> * * @return The bytes for schema. */ @java.lang.Override public com.google.protobuf.ByteString getSchemaBytes() { java.lang.Object ref = schema_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); schema_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int RELATION_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object relation_ = ""; /** * * * <pre> * The relation name (BigQuery table or view equivalent in the target data * warehouse). * </pre> * * <code>string relation = 3;</code> * * @return The relation. 
*/ @java.lang.Override public java.lang.String getRelation() { java.lang.Object ref = relation_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); relation_ = s; return s; } } /** * * * <pre> * The relation name (BigQuery table or view equivalent in the target data * warehouse). * </pre> * * <code>string relation = 3;</code> * * @return The bytes for relation. */ @java.lang.Override public com.google.protobuf.ByteString getRelationBytes() { java.lang.Object ref = relation_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); relation_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int ATTRIBUTE_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object attribute_ = ""; /** * * * <pre> * The attribute name (BigQuery column equivalent in the target data * warehouse). * </pre> * * <code>string attribute = 4;</code> * * @return The attribute. */ @java.lang.Override public java.lang.String getAttribute() { java.lang.Object ref = attribute_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); attribute_ = s; return s; } } /** * * * <pre> * The attribute name (BigQuery column equivalent in the target data * warehouse). * </pre> * * <code>string attribute = 4;</code> * * @return The bytes for attribute. 
*/ @java.lang.Override public com.google.protobuf.ByteString getAttributeBytes() { java.lang.Object ref = attribute_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); attribute_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(database_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, database_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(schema_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, schema_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(relation_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, relation_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(attribute_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, attribute_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(database_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, database_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(schema_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, schema_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(relation_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, relation_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(attribute_)) { 
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, attribute_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.bigquery.migration.v2.NameMappingValue)) { return super.equals(obj); } com.google.cloud.bigquery.migration.v2.NameMappingValue other = (com.google.cloud.bigquery.migration.v2.NameMappingValue) obj; if (!getDatabase().equals(other.getDatabase())) return false; if (!getSchema().equals(other.getSchema())) return false; if (!getRelation().equals(other.getRelation())) return false; if (!getAttribute().equals(other.getAttribute())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + DATABASE_FIELD_NUMBER; hash = (53 * hash) + getDatabase().hashCode(); hash = (37 * hash) + SCHEMA_FIELD_NUMBER; hash = (53 * hash) + getSchema().hashCode(); hash = (37 * hash) + RELATION_FIELD_NUMBER; hash = (53 * hash) + getRelation().hashCode(); hash = (37 * hash) + ATTRIBUTE_FIELD_NUMBER; hash = (53 * hash) + getAttribute().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.bigquery.migration.v2.NameMappingValue parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.bigquery.migration.v2.NameMappingValue parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.bigquery.migration.v2.NameMappingValue parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.bigquery.migration.v2.NameMappingValue parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.bigquery.migration.v2.NameMappingValue parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.bigquery.migration.v2.NameMappingValue parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.bigquery.migration.v2.NameMappingValue parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.bigquery.migration.v2.NameMappingValue parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.bigquery.migration.v2.NameMappingValue parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.bigquery.migration.v2.NameMappingValue parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, 
input, extensionRegistry); } public static com.google.cloud.bigquery.migration.v2.NameMappingValue parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.bigquery.migration.v2.NameMappingValue parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.bigquery.migration.v2.NameMappingValue prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The potential components of a full name mapping that will be mapped * during translation in the target data warehouse. 
* </pre> * * Protobuf type {@code google.cloud.bigquery.migration.v2.NameMappingValue} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.migration.v2.NameMappingValue) com.google.cloud.bigquery.migration.v2.NameMappingValueOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.bigquery.migration.v2.TranslationConfigProto .internal_static_google_cloud_bigquery_migration_v2_NameMappingValue_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.bigquery.migration.v2.TranslationConfigProto .internal_static_google_cloud_bigquery_migration_v2_NameMappingValue_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.bigquery.migration.v2.NameMappingValue.class, com.google.cloud.bigquery.migration.v2.NameMappingValue.Builder.class); } // Construct using com.google.cloud.bigquery.migration.v2.NameMappingValue.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; database_ = ""; schema_ = ""; relation_ = ""; attribute_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.bigquery.migration.v2.TranslationConfigProto .internal_static_google_cloud_bigquery_migration_v2_NameMappingValue_descriptor; } @java.lang.Override public com.google.cloud.bigquery.migration.v2.NameMappingValue getDefaultInstanceForType() { return com.google.cloud.bigquery.migration.v2.NameMappingValue.getDefaultInstance(); } @java.lang.Override public com.google.cloud.bigquery.migration.v2.NameMappingValue build() { 
com.google.cloud.bigquery.migration.v2.NameMappingValue result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.bigquery.migration.v2.NameMappingValue buildPartial() { com.google.cloud.bigquery.migration.v2.NameMappingValue result = new com.google.cloud.bigquery.migration.v2.NameMappingValue(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.bigquery.migration.v2.NameMappingValue result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.database_ = database_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.schema_ = schema_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.relation_ = relation_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.attribute_ = attribute_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.bigquery.migration.v2.NameMappingValue) { return 
mergeFrom((com.google.cloud.bigquery.migration.v2.NameMappingValue) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.bigquery.migration.v2.NameMappingValue other) { if (other == com.google.cloud.bigquery.migration.v2.NameMappingValue.getDefaultInstance()) return this; if (!other.getDatabase().isEmpty()) { database_ = other.database_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getSchema().isEmpty()) { schema_ = other.schema_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getRelation().isEmpty()) { relation_ = other.relation_; bitField0_ |= 0x00000004; onChanged(); } if (!other.getAttribute().isEmpty()) { attribute_ = other.attribute_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { database_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { schema_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { relation_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 case 34: { attribute_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private 
int bitField0_; private java.lang.Object database_ = ""; /** * * * <pre> * The database name (BigQuery project ID equivalent in the target data * warehouse). * </pre> * * <code>string database = 1;</code> * * @return The database. */ public java.lang.String getDatabase() { java.lang.Object ref = database_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); database_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The database name (BigQuery project ID equivalent in the target data * warehouse). * </pre> * * <code>string database = 1;</code> * * @return The bytes for database. */ public com.google.protobuf.ByteString getDatabaseBytes() { java.lang.Object ref = database_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); database_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The database name (BigQuery project ID equivalent in the target data * warehouse). * </pre> * * <code>string database = 1;</code> * * @param value The database to set. * @return This builder for chaining. */ public Builder setDatabase(java.lang.String value) { if (value == null) { throw new NullPointerException(); } database_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The database name (BigQuery project ID equivalent in the target data * warehouse). * </pre> * * <code>string database = 1;</code> * * @return This builder for chaining. */ public Builder clearDatabase() { database_ = getDefaultInstance().getDatabase(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * The database name (BigQuery project ID equivalent in the target data * warehouse). * </pre> * * <code>string database = 1;</code> * * @param value The bytes for database to set. 
* @return This builder for chaining. */ public Builder setDatabaseBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); database_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object schema_ = ""; /** * * * <pre> * The schema name (BigQuery dataset equivalent in the target data warehouse). * </pre> * * <code>string schema = 2;</code> * * @return The schema. */ public java.lang.String getSchema() { java.lang.Object ref = schema_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); schema_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The schema name (BigQuery dataset equivalent in the target data warehouse). * </pre> * * <code>string schema = 2;</code> * * @return The bytes for schema. */ public com.google.protobuf.ByteString getSchemaBytes() { java.lang.Object ref = schema_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); schema_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The schema name (BigQuery dataset equivalent in the target data warehouse). * </pre> * * <code>string schema = 2;</code> * * @param value The schema to set. * @return This builder for chaining. */ public Builder setSchema(java.lang.String value) { if (value == null) { throw new NullPointerException(); } schema_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The schema name (BigQuery dataset equivalent in the target data warehouse). * </pre> * * <code>string schema = 2;</code> * * @return This builder for chaining. 
*/ public Builder clearSchema() { schema_ = getDefaultInstance().getSchema(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * The schema name (BigQuery dataset equivalent in the target data warehouse). * </pre> * * <code>string schema = 2;</code> * * @param value The bytes for schema to set. * @return This builder for chaining. */ public Builder setSchemaBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); schema_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object relation_ = ""; /** * * * <pre> * The relation name (BigQuery table or view equivalent in the target data * warehouse). * </pre> * * <code>string relation = 3;</code> * * @return The relation. */ public java.lang.String getRelation() { java.lang.Object ref = relation_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); relation_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The relation name (BigQuery table or view equivalent in the target data * warehouse). * </pre> * * <code>string relation = 3;</code> * * @return The bytes for relation. */ public com.google.protobuf.ByteString getRelationBytes() { java.lang.Object ref = relation_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); relation_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The relation name (BigQuery table or view equivalent in the target data * warehouse). * </pre> * * <code>string relation = 3;</code> * * @param value The relation to set. * @return This builder for chaining. 
*/ public Builder setRelation(java.lang.String value) { if (value == null) { throw new NullPointerException(); } relation_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * The relation name (BigQuery table or view equivalent in the target data * warehouse). * </pre> * * <code>string relation = 3;</code> * * @return This builder for chaining. */ public Builder clearRelation() { relation_ = getDefaultInstance().getRelation(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * The relation name (BigQuery table or view equivalent in the target data * warehouse). * </pre> * * <code>string relation = 3;</code> * * @param value The bytes for relation to set. * @return This builder for chaining. */ public Builder setRelationBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); relation_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private java.lang.Object attribute_ = ""; /** * * * <pre> * The attribute name (BigQuery column equivalent in the target data * warehouse). * </pre> * * <code>string attribute = 4;</code> * * @return The attribute. */ public java.lang.String getAttribute() { java.lang.Object ref = attribute_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); attribute_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The attribute name (BigQuery column equivalent in the target data * warehouse). * </pre> * * <code>string attribute = 4;</code> * * @return The bytes for attribute. 
*/ public com.google.protobuf.ByteString getAttributeBytes() { java.lang.Object ref = attribute_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); attribute_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The attribute name (BigQuery column equivalent in the target data * warehouse). * </pre> * * <code>string attribute = 4;</code> * * @param value The attribute to set. * @return This builder for chaining. */ public Builder setAttribute(java.lang.String value) { if (value == null) { throw new NullPointerException(); } attribute_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * The attribute name (BigQuery column equivalent in the target data * warehouse). * </pre> * * <code>string attribute = 4;</code> * * @return This builder for chaining. */ public Builder clearAttribute() { attribute_ = getDefaultInstance().getAttribute(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * The attribute name (BigQuery column equivalent in the target data * warehouse). * </pre> * * <code>string attribute = 4;</code> * * @param value The bytes for attribute to set. * @return This builder for chaining. 
*/ public Builder setAttributeBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); attribute_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.migration.v2.NameMappingValue) } // @@protoc_insertion_point(class_scope:google.cloud.bigquery.migration.v2.NameMappingValue) private static final com.google.cloud.bigquery.migration.v2.NameMappingValue DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.bigquery.migration.v2.NameMappingValue(); } public static com.google.cloud.bigquery.migration.v2.NameMappingValue getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<NameMappingValue> PARSER = new com.google.protobuf.AbstractParser<NameMappingValue>() { @java.lang.Override public NameMappingValue parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static 
com.google.protobuf.Parser<NameMappingValue> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<NameMappingValue> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.bigquery.migration.v2.NameMappingValue getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,709
java-modelarmor/proto-google-cloud-modelarmor-v1/src/main/java/com/google/cloud/modelarmor/v1/SdpFilterResult.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/modelarmor/v1/service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.modelarmor.v1; /** * * * <pre> * Sensitive Data Protection filter result. * </pre> * * Protobuf type {@code google.cloud.modelarmor.v1.SdpFilterResult} */ public final class SdpFilterResult extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.modelarmor.v1.SdpFilterResult) SdpFilterResultOrBuilder { private static final long serialVersionUID = 0L; // Use SdpFilterResult.newBuilder() to construct. 
private SdpFilterResult(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private SdpFilterResult() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new SdpFilterResult(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.modelarmor.v1.V1mainProto .internal_static_google_cloud_modelarmor_v1_SdpFilterResult_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.modelarmor.v1.V1mainProto .internal_static_google_cloud_modelarmor_v1_SdpFilterResult_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.modelarmor.v1.SdpFilterResult.class, com.google.cloud.modelarmor.v1.SdpFilterResult.Builder.class); } private int resultCase_ = 0; @SuppressWarnings("serial") private java.lang.Object result_; public enum ResultCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { INSPECT_RESULT(1), DEIDENTIFY_RESULT(2), RESULT_NOT_SET(0); private final int value; private ResultCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static ResultCase valueOf(int value) { return forNumber(value); } public static ResultCase forNumber(int value) { switch (value) { case 1: return INSPECT_RESULT; case 2: return DEIDENTIFY_RESULT; case 0: return RESULT_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public ResultCase getResultCase() { return ResultCase.forNumber(resultCase_); } public static final int INSPECT_RESULT_FIELD_NUMBER = 1; /** * * * <pre> * Sensitive Data Protection Inspection result if inspection is performed. 
* </pre> * * <code>.google.cloud.modelarmor.v1.SdpInspectResult inspect_result = 1;</code> * * @return Whether the inspectResult field is set. */ @java.lang.Override public boolean hasInspectResult() { return resultCase_ == 1; } /** * * * <pre> * Sensitive Data Protection Inspection result if inspection is performed. * </pre> * * <code>.google.cloud.modelarmor.v1.SdpInspectResult inspect_result = 1;</code> * * @return The inspectResult. */ @java.lang.Override public com.google.cloud.modelarmor.v1.SdpInspectResult getInspectResult() { if (resultCase_ == 1) { return (com.google.cloud.modelarmor.v1.SdpInspectResult) result_; } return com.google.cloud.modelarmor.v1.SdpInspectResult.getDefaultInstance(); } /** * * * <pre> * Sensitive Data Protection Inspection result if inspection is performed. * </pre> * * <code>.google.cloud.modelarmor.v1.SdpInspectResult inspect_result = 1;</code> */ @java.lang.Override public com.google.cloud.modelarmor.v1.SdpInspectResultOrBuilder getInspectResultOrBuilder() { if (resultCase_ == 1) { return (com.google.cloud.modelarmor.v1.SdpInspectResult) result_; } return com.google.cloud.modelarmor.v1.SdpInspectResult.getDefaultInstance(); } public static final int DEIDENTIFY_RESULT_FIELD_NUMBER = 2; /** * * * <pre> * Sensitive Data Protection Deidentification result if deidentification is * performed. * </pre> * * <code>.google.cloud.modelarmor.v1.SdpDeidentifyResult deidentify_result = 2;</code> * * @return Whether the deidentifyResult field is set. */ @java.lang.Override public boolean hasDeidentifyResult() { return resultCase_ == 2; } /** * * * <pre> * Sensitive Data Protection Deidentification result if deidentification is * performed. * </pre> * * <code>.google.cloud.modelarmor.v1.SdpDeidentifyResult deidentify_result = 2;</code> * * @return The deidentifyResult. 
*/ @java.lang.Override public com.google.cloud.modelarmor.v1.SdpDeidentifyResult getDeidentifyResult() { if (resultCase_ == 2) { return (com.google.cloud.modelarmor.v1.SdpDeidentifyResult) result_; } return com.google.cloud.modelarmor.v1.SdpDeidentifyResult.getDefaultInstance(); } /** * * * <pre> * Sensitive Data Protection Deidentification result if deidentification is * performed. * </pre> * * <code>.google.cloud.modelarmor.v1.SdpDeidentifyResult deidentify_result = 2;</code> */ @java.lang.Override public com.google.cloud.modelarmor.v1.SdpDeidentifyResultOrBuilder getDeidentifyResultOrBuilder() { if (resultCase_ == 2) { return (com.google.cloud.modelarmor.v1.SdpDeidentifyResult) result_; } return com.google.cloud.modelarmor.v1.SdpDeidentifyResult.getDefaultInstance(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (resultCase_ == 1) { output.writeMessage(1, (com.google.cloud.modelarmor.v1.SdpInspectResult) result_); } if (resultCase_ == 2) { output.writeMessage(2, (com.google.cloud.modelarmor.v1.SdpDeidentifyResult) result_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (resultCase_ == 1) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 1, (com.google.cloud.modelarmor.v1.SdpInspectResult) result_); } if (resultCase_ == 2) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 2, (com.google.cloud.modelarmor.v1.SdpDeidentifyResult) result_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object 
obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.modelarmor.v1.SdpFilterResult)) { return super.equals(obj); } com.google.cloud.modelarmor.v1.SdpFilterResult other = (com.google.cloud.modelarmor.v1.SdpFilterResult) obj; if (!getResultCase().equals(other.getResultCase())) return false; switch (resultCase_) { case 1: if (!getInspectResult().equals(other.getInspectResult())) return false; break; case 2: if (!getDeidentifyResult().equals(other.getDeidentifyResult())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); switch (resultCase_) { case 1: hash = (37 * hash) + INSPECT_RESULT_FIELD_NUMBER; hash = (53 * hash) + getInspectResult().hashCode(); break; case 2: hash = (37 * hash) + DEIDENTIFY_RESULT_FIELD_NUMBER; hash = (53 * hash) + getDeidentifyResult().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.modelarmor.v1.SdpFilterResult parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.modelarmor.v1.SdpFilterResult parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.modelarmor.v1.SdpFilterResult parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.modelarmor.v1.SdpFilterResult parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 
throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.modelarmor.v1.SdpFilterResult parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.modelarmor.v1.SdpFilterResult parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.modelarmor.v1.SdpFilterResult parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.modelarmor.v1.SdpFilterResult parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.modelarmor.v1.SdpFilterResult parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.modelarmor.v1.SdpFilterResult parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.modelarmor.v1.SdpFilterResult parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.modelarmor.v1.SdpFilterResult parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.modelarmor.v1.SdpFilterResult prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Sensitive Data Protection filter result. * </pre> * * Protobuf type {@code google.cloud.modelarmor.v1.SdpFilterResult} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.modelarmor.v1.SdpFilterResult) com.google.cloud.modelarmor.v1.SdpFilterResultOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.modelarmor.v1.V1mainProto .internal_static_google_cloud_modelarmor_v1_SdpFilterResult_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.modelarmor.v1.V1mainProto .internal_static_google_cloud_modelarmor_v1_SdpFilterResult_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.modelarmor.v1.SdpFilterResult.class, com.google.cloud.modelarmor.v1.SdpFilterResult.Builder.class); } // Construct using com.google.cloud.modelarmor.v1.SdpFilterResult.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public 
Builder clear() { super.clear(); bitField0_ = 0; if (inspectResultBuilder_ != null) { inspectResultBuilder_.clear(); } if (deidentifyResultBuilder_ != null) { deidentifyResultBuilder_.clear(); } resultCase_ = 0; result_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.modelarmor.v1.V1mainProto .internal_static_google_cloud_modelarmor_v1_SdpFilterResult_descriptor; } @java.lang.Override public com.google.cloud.modelarmor.v1.SdpFilterResult getDefaultInstanceForType() { return com.google.cloud.modelarmor.v1.SdpFilterResult.getDefaultInstance(); } @java.lang.Override public com.google.cloud.modelarmor.v1.SdpFilterResult build() { com.google.cloud.modelarmor.v1.SdpFilterResult result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.modelarmor.v1.SdpFilterResult buildPartial() { com.google.cloud.modelarmor.v1.SdpFilterResult result = new com.google.cloud.modelarmor.v1.SdpFilterResult(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0(com.google.cloud.modelarmor.v1.SdpFilterResult result) { int from_bitField0_ = bitField0_; } private void buildPartialOneofs(com.google.cloud.modelarmor.v1.SdpFilterResult result) { result.resultCase_ = resultCase_; result.result_ = this.result_; if (resultCase_ == 1 && inspectResultBuilder_ != null) { result.result_ = inspectResultBuilder_.build(); } if (resultCase_ == 2 && deidentifyResultBuilder_ != null) { result.result_ = deidentifyResultBuilder_.build(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder 
clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.modelarmor.v1.SdpFilterResult) { return mergeFrom((com.google.cloud.modelarmor.v1.SdpFilterResult) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.modelarmor.v1.SdpFilterResult other) { if (other == com.google.cloud.modelarmor.v1.SdpFilterResult.getDefaultInstance()) return this; switch (other.getResultCase()) { case INSPECT_RESULT: { mergeInspectResult(other.getInspectResult()); break; } case DEIDENTIFY_RESULT: { mergeDeidentifyResult(other.getDeidentifyResult()); break; } case RESULT_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getInspectResultFieldBuilder().getBuilder(), extensionRegistry); resultCase_ = 1; break; } // case 10 case 18: { input.readMessage( 
getDeidentifyResultFieldBuilder().getBuilder(), extensionRegistry); resultCase_ = 2; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int resultCase_ = 0; private java.lang.Object result_; public ResultCase getResultCase() { return ResultCase.forNumber(resultCase_); } public Builder clearResult() { resultCase_ = 0; result_ = null; onChanged(); return this; } private int bitField0_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.modelarmor.v1.SdpInspectResult, com.google.cloud.modelarmor.v1.SdpInspectResult.Builder, com.google.cloud.modelarmor.v1.SdpInspectResultOrBuilder> inspectResultBuilder_; /** * * * <pre> * Sensitive Data Protection Inspection result if inspection is performed. * </pre> * * <code>.google.cloud.modelarmor.v1.SdpInspectResult inspect_result = 1;</code> * * @return Whether the inspectResult field is set. */ @java.lang.Override public boolean hasInspectResult() { return resultCase_ == 1; } /** * * * <pre> * Sensitive Data Protection Inspection result if inspection is performed. * </pre> * * <code>.google.cloud.modelarmor.v1.SdpInspectResult inspect_result = 1;</code> * * @return The inspectResult. 
*/ @java.lang.Override public com.google.cloud.modelarmor.v1.SdpInspectResult getInspectResult() { if (inspectResultBuilder_ == null) { if (resultCase_ == 1) { return (com.google.cloud.modelarmor.v1.SdpInspectResult) result_; } return com.google.cloud.modelarmor.v1.SdpInspectResult.getDefaultInstance(); } else { if (resultCase_ == 1) { return inspectResultBuilder_.getMessage(); } return com.google.cloud.modelarmor.v1.SdpInspectResult.getDefaultInstance(); } } /** * * * <pre> * Sensitive Data Protection Inspection result if inspection is performed. * </pre> * * <code>.google.cloud.modelarmor.v1.SdpInspectResult inspect_result = 1;</code> */ public Builder setInspectResult(com.google.cloud.modelarmor.v1.SdpInspectResult value) { if (inspectResultBuilder_ == null) { if (value == null) { throw new NullPointerException(); } result_ = value; onChanged(); } else { inspectResultBuilder_.setMessage(value); } resultCase_ = 1; return this; } /** * * * <pre> * Sensitive Data Protection Inspection result if inspection is performed. * </pre> * * <code>.google.cloud.modelarmor.v1.SdpInspectResult inspect_result = 1;</code> */ public Builder setInspectResult( com.google.cloud.modelarmor.v1.SdpInspectResult.Builder builderForValue) { if (inspectResultBuilder_ == null) { result_ = builderForValue.build(); onChanged(); } else { inspectResultBuilder_.setMessage(builderForValue.build()); } resultCase_ = 1; return this; } /** * * * <pre> * Sensitive Data Protection Inspection result if inspection is performed. 
* </pre> * * <code>.google.cloud.modelarmor.v1.SdpInspectResult inspect_result = 1;</code> */ public Builder mergeInspectResult(com.google.cloud.modelarmor.v1.SdpInspectResult value) { if (inspectResultBuilder_ == null) { if (resultCase_ == 1 && result_ != com.google.cloud.modelarmor.v1.SdpInspectResult.getDefaultInstance()) { result_ = com.google.cloud.modelarmor.v1.SdpInspectResult.newBuilder( (com.google.cloud.modelarmor.v1.SdpInspectResult) result_) .mergeFrom(value) .buildPartial(); } else { result_ = value; } onChanged(); } else { if (resultCase_ == 1) { inspectResultBuilder_.mergeFrom(value); } else { inspectResultBuilder_.setMessage(value); } } resultCase_ = 1; return this; } /** * * * <pre> * Sensitive Data Protection Inspection result if inspection is performed. * </pre> * * <code>.google.cloud.modelarmor.v1.SdpInspectResult inspect_result = 1;</code> */ public Builder clearInspectResult() { if (inspectResultBuilder_ == null) { if (resultCase_ == 1) { resultCase_ = 0; result_ = null; onChanged(); } } else { if (resultCase_ == 1) { resultCase_ = 0; result_ = null; } inspectResultBuilder_.clear(); } return this; } /** * * * <pre> * Sensitive Data Protection Inspection result if inspection is performed. * </pre> * * <code>.google.cloud.modelarmor.v1.SdpInspectResult inspect_result = 1;</code> */ public com.google.cloud.modelarmor.v1.SdpInspectResult.Builder getInspectResultBuilder() { return getInspectResultFieldBuilder().getBuilder(); } /** * * * <pre> * Sensitive Data Protection Inspection result if inspection is performed. 
* </pre> * * <code>.google.cloud.modelarmor.v1.SdpInspectResult inspect_result = 1;</code> */ @java.lang.Override public com.google.cloud.modelarmor.v1.SdpInspectResultOrBuilder getInspectResultOrBuilder() { if ((resultCase_ == 1) && (inspectResultBuilder_ != null)) { return inspectResultBuilder_.getMessageOrBuilder(); } else { if (resultCase_ == 1) { return (com.google.cloud.modelarmor.v1.SdpInspectResult) result_; } return com.google.cloud.modelarmor.v1.SdpInspectResult.getDefaultInstance(); } } /** * * * <pre> * Sensitive Data Protection Inspection result if inspection is performed. * </pre> * * <code>.google.cloud.modelarmor.v1.SdpInspectResult inspect_result = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.modelarmor.v1.SdpInspectResult, com.google.cloud.modelarmor.v1.SdpInspectResult.Builder, com.google.cloud.modelarmor.v1.SdpInspectResultOrBuilder> getInspectResultFieldBuilder() { if (inspectResultBuilder_ == null) { if (!(resultCase_ == 1)) { result_ = com.google.cloud.modelarmor.v1.SdpInspectResult.getDefaultInstance(); } inspectResultBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.modelarmor.v1.SdpInspectResult, com.google.cloud.modelarmor.v1.SdpInspectResult.Builder, com.google.cloud.modelarmor.v1.SdpInspectResultOrBuilder>( (com.google.cloud.modelarmor.v1.SdpInspectResult) result_, getParentForChildren(), isClean()); result_ = null; } resultCase_ = 1; onChanged(); return inspectResultBuilder_; } private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.modelarmor.v1.SdpDeidentifyResult, com.google.cloud.modelarmor.v1.SdpDeidentifyResult.Builder, com.google.cloud.modelarmor.v1.SdpDeidentifyResultOrBuilder> deidentifyResultBuilder_; /** * * * <pre> * Sensitive Data Protection Deidentification result if deidentification is * performed. * </pre> * * <code>.google.cloud.modelarmor.v1.SdpDeidentifyResult deidentify_result = 2;</code> * * @return Whether the deidentifyResult field is set. 
*/ @java.lang.Override public boolean hasDeidentifyResult() { return resultCase_ == 2; } /** * * * <pre> * Sensitive Data Protection Deidentification result if deidentification is * performed. * </pre> * * <code>.google.cloud.modelarmor.v1.SdpDeidentifyResult deidentify_result = 2;</code> * * @return The deidentifyResult. */ @java.lang.Override public com.google.cloud.modelarmor.v1.SdpDeidentifyResult getDeidentifyResult() { if (deidentifyResultBuilder_ == null) { if (resultCase_ == 2) { return (com.google.cloud.modelarmor.v1.SdpDeidentifyResult) result_; } return com.google.cloud.modelarmor.v1.SdpDeidentifyResult.getDefaultInstance(); } else { if (resultCase_ == 2) { return deidentifyResultBuilder_.getMessage(); } return com.google.cloud.modelarmor.v1.SdpDeidentifyResult.getDefaultInstance(); } } /** * * * <pre> * Sensitive Data Protection Deidentification result if deidentification is * performed. * </pre> * * <code>.google.cloud.modelarmor.v1.SdpDeidentifyResult deidentify_result = 2;</code> */ public Builder setDeidentifyResult(com.google.cloud.modelarmor.v1.SdpDeidentifyResult value) { if (deidentifyResultBuilder_ == null) { if (value == null) { throw new NullPointerException(); } result_ = value; onChanged(); } else { deidentifyResultBuilder_.setMessage(value); } resultCase_ = 2; return this; } /** * * * <pre> * Sensitive Data Protection Deidentification result if deidentification is * performed. * </pre> * * <code>.google.cloud.modelarmor.v1.SdpDeidentifyResult deidentify_result = 2;</code> */ public Builder setDeidentifyResult( com.google.cloud.modelarmor.v1.SdpDeidentifyResult.Builder builderForValue) { if (deidentifyResultBuilder_ == null) { result_ = builderForValue.build(); onChanged(); } else { deidentifyResultBuilder_.setMessage(builderForValue.build()); } resultCase_ = 2; return this; } /** * * * <pre> * Sensitive Data Protection Deidentification result if deidentification is * performed. 
* </pre> * * <code>.google.cloud.modelarmor.v1.SdpDeidentifyResult deidentify_result = 2;</code> */ public Builder mergeDeidentifyResult(com.google.cloud.modelarmor.v1.SdpDeidentifyResult value) { if (deidentifyResultBuilder_ == null) { if (resultCase_ == 2 && result_ != com.google.cloud.modelarmor.v1.SdpDeidentifyResult.getDefaultInstance()) { result_ = com.google.cloud.modelarmor.v1.SdpDeidentifyResult.newBuilder( (com.google.cloud.modelarmor.v1.SdpDeidentifyResult) result_) .mergeFrom(value) .buildPartial(); } else { result_ = value; } onChanged(); } else { if (resultCase_ == 2) { deidentifyResultBuilder_.mergeFrom(value); } else { deidentifyResultBuilder_.setMessage(value); } } resultCase_ = 2; return this; } /** * * * <pre> * Sensitive Data Protection Deidentification result if deidentification is * performed. * </pre> * * <code>.google.cloud.modelarmor.v1.SdpDeidentifyResult deidentify_result = 2;</code> */ public Builder clearDeidentifyResult() { if (deidentifyResultBuilder_ == null) { if (resultCase_ == 2) { resultCase_ = 0; result_ = null; onChanged(); } } else { if (resultCase_ == 2) { resultCase_ = 0; result_ = null; } deidentifyResultBuilder_.clear(); } return this; } /** * * * <pre> * Sensitive Data Protection Deidentification result if deidentification is * performed. * </pre> * * <code>.google.cloud.modelarmor.v1.SdpDeidentifyResult deidentify_result = 2;</code> */ public com.google.cloud.modelarmor.v1.SdpDeidentifyResult.Builder getDeidentifyResultBuilder() { return getDeidentifyResultFieldBuilder().getBuilder(); } /** * * * <pre> * Sensitive Data Protection Deidentification result if deidentification is * performed. 
* </pre> * * <code>.google.cloud.modelarmor.v1.SdpDeidentifyResult deidentify_result = 2;</code> */ @java.lang.Override public com.google.cloud.modelarmor.v1.SdpDeidentifyResultOrBuilder getDeidentifyResultOrBuilder() { if ((resultCase_ == 2) && (deidentifyResultBuilder_ != null)) { return deidentifyResultBuilder_.getMessageOrBuilder(); } else { if (resultCase_ == 2) { return (com.google.cloud.modelarmor.v1.SdpDeidentifyResult) result_; } return com.google.cloud.modelarmor.v1.SdpDeidentifyResult.getDefaultInstance(); } } /** * * * <pre> * Sensitive Data Protection Deidentification result if deidentification is * performed. * </pre> * * <code>.google.cloud.modelarmor.v1.SdpDeidentifyResult deidentify_result = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.modelarmor.v1.SdpDeidentifyResult, com.google.cloud.modelarmor.v1.SdpDeidentifyResult.Builder, com.google.cloud.modelarmor.v1.SdpDeidentifyResultOrBuilder> getDeidentifyResultFieldBuilder() { if (deidentifyResultBuilder_ == null) { if (!(resultCase_ == 2)) { result_ = com.google.cloud.modelarmor.v1.SdpDeidentifyResult.getDefaultInstance(); } deidentifyResultBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.modelarmor.v1.SdpDeidentifyResult, com.google.cloud.modelarmor.v1.SdpDeidentifyResult.Builder, com.google.cloud.modelarmor.v1.SdpDeidentifyResultOrBuilder>( (com.google.cloud.modelarmor.v1.SdpDeidentifyResult) result_, getParentForChildren(), isClean()); result_ = null; } resultCase_ = 2; onChanged(); return deidentifyResultBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.modelarmor.v1.SdpFilterResult) } // 
@@protoc_insertion_point(class_scope:google.cloud.modelarmor.v1.SdpFilterResult) private static final com.google.cloud.modelarmor.v1.SdpFilterResult DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.modelarmor.v1.SdpFilterResult(); } public static com.google.cloud.modelarmor.v1.SdpFilterResult getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<SdpFilterResult> PARSER = new com.google.protobuf.AbstractParser<SdpFilterResult>() { @java.lang.Override public SdpFilterResult parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<SdpFilterResult> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<SdpFilterResult> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.modelarmor.v1.SdpFilterResult getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/empire-db
36,769
empire-db/src/main/java/org/apache/empire/db/DBReader.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.empire.db; import java.io.Closeable; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import org.apache.empire.commons.ClassUtils; import org.apache.empire.commons.ObjectUtils; import org.apache.empire.data.Column; import org.apache.empire.data.ColumnExpr; import org.apache.empire.data.DataType; import org.apache.empire.data.Entity; import org.apache.empire.db.exceptions.EmpireSQLException; import org.apache.empire.db.exceptions.NoPrimaryKeyException; import org.apache.empire.db.exceptions.QueryNoResultException; import org.apache.empire.db.list.DataBean; import org.apache.empire.dbms.DBMSHandler; import org.apache.empire.exceptions.BeanInstantiationException; import org.apache.empire.exceptions.InvalidArgumentException; import org.apache.empire.exceptions.InvalidOperationException; import org.apache.empire.exceptions.ObjectNotValidException; import org.apache.empire.xml.XMLUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import 
org.w3c.dom.Document;
import org.w3c.dom.Element;

/**
 * <P>
 * This class is used to perform database queries from a DBCommand object and access the results.<BR>
 * In order to perform a query call the open() function or - for single row queries - call getRecordData();<BR>
 * You can iterate through the rows using moveNext() or an iterator.<BR>
 * <P>
 * However take care: A reader must always be explicitly closed using the close() method!<BR>
 * Otherwise you may lock the JDBC connection and run out of resources.<BR>
 * Use <PRE>try { ... } finally { reader.close(); } </PRE> to make sure the reader is closed.<BR>
 * <P>
 * To access and work with the query result you can do one of the following:<BR>
 * <ul>
 * <li>access field values directly by using one of the get... functions (see {@link DBRecordData})</li>
 * <li>get the rows as a list of Java Beans using by using {@link DBReader#getBeanList(Class, int)}</li>
 * <li>get the rows as an XML-Document using {@link DBReader#getXmlDocument()} </li>
 * <li>initialize a DBRecord with the current row data using {@link DBReader#initRecord(DBRecordBase)}<br>
 * This will allow you to modify and update the data.
 * </li>
 * </ul>
 */
public class DBReader extends DBRecordData implements Closeable
{
    // *Deprecated* private static final long serialVersionUID = 1L;

    /**
     * DBReaderIterator
     * Base class for DBReader iterators.
     * Tracks how many rows were consumed (curCount) against the caller-supplied
     * row limit (maxCount); a negative limit means "unbounded".
     * @author rainer
     */
    public abstract class DBReaderIterator implements Iterator<DBRecordData>
    {
        protected int curCount = 0;
        protected int maxCount = 0;

        public DBReaderIterator(int maxCount)
        {
            if (maxCount < 0)
                maxCount = 0x7FFFFFFF; // Highest positive number (Integer.MAX_VALUE)
            // Set Maxcount
            this.maxCount = maxCount;
        }

        /**
         * Implements the Iterator Interface Method remove not implemented and not applicable.
         * Only logs an error; it intentionally does not throw.
         */
        @Override
        public void remove()
        {
            log.error("DBReader.remove ist not implemented!");
        }

        /**
         * Disposes the iterator.
         * Sets both counters to -1 so hasNext()/next() can never succeed again.
         */
        public void dispose()
        {
            curCount = maxCount = -1;
        }
    }

    /**
     * This is an iterator for scrolling resultsets.
     * This iterator has no such limitations as the forward iterator.
     */
    public class DBReaderScrollableIterator extends DBReaderIterator
    {
        public DBReaderScrollableIterator(int maxCount)
        {
            super(maxCount);
        }

        /**
         * Implements the Iterator Interface.
         * Uses ResultSet.isLast()/isAfterLast(), which require a scrollable cursor.
         *
         * @return true if there is another record to read
         */
        @Override
        public boolean hasNext()
        {
            try
            {
                // Check position
                if (curCount >= maxCount)
                    return false;
                // Check Recordset
                if (rset == null || rset.isLast() || rset.isAfterLast())
                    return false;
                // there are more records
                return true;
            } catch (SQLException e) {
                // Error
                throw new EmpireSQLException(context.getDbms(), e);
            }
        }

        /**
         * Implements the Iterator Interface.
         * Note: returns the shared DBReader itself positioned on the next row,
         * not a detached row object.
         *
         * @return the current Record interface
         */
        @Override
        public DBRecordData next()
        {
            if ((curCount < maxCount && moveNext()))
            {
                curCount++;
                return DBReader.this;
            }
            // Past the end!
            return null;
        }
    }

    /**
     * This is an iterator for forward only resultsets.
     * There is an important limitation on this iterator: After calling
     * hasNext() the caller may not use any functions on the current item any more. i.e.
     * Example:
     * while (i.hasNext())
     * {
     *     DBRecordData r = i.next();
     *     Object o = r.getValue(0); // ok
     *
     *     bool last = i.hasNext(); // ok
     *     Object o = r.getValue(0); // Illegal call!
     * }
     */
    public class DBReaderForwardIterator extends DBReaderIterator
    {
        // getCurrent: true if the cursor must still be advanced for the current item
        private boolean getCurrent = true;
        // hasCurrent: result of the last cursor advance performed by hasNext()
        private boolean hasCurrent = false;

        public DBReaderForwardIterator(int maxCount)
        {
            super(maxCount);
        }

        /**
         * Implements the Iterator Interface.
         * Advances the cursor eagerly (this is why the current row becomes
         * invalid after calling hasNext(), see class comment).
         *
         * @return true if there is another record to read
         */
        @Override
        public boolean hasNext()
        {
            // Check position
            if (curCount >= maxCount)
                return false;
            if (rset == null)
                throw new ObjectNotValidException(this);
            // Check next Record
            if (getCurrent == true)
            {
                getCurrent = false;
                hasCurrent = moveNext();
            }
            return hasCurrent;
        }

        /**
         * Implements the Iterator Interface.
         *
         * @return the current Record interface
         */
        @Override
        public DBRecordData next()
        {
            if (hasCurrent == false)
                return null; // Past the end!
            // next called without call to hasNext ?
            if (getCurrent && !moveNext())
            {
                // No more records
                hasCurrent = false;
                getCurrent = false;
                return null;
            }
            // Move forward
            curCount++;
            getCurrent = true;
            return DBReader.this;
        }
    }

    // Logger
    protected static final Logger log = LoggerFactory.getLogger(DBReader.class);

    // Global switch for the open-ResultSet leak detection below (off by default)
    private static boolean trackOpenResultSets = false;

    /**
     * Support for finding code errors where a DBRecordSet is opened but not closed
     */
    private static ThreadLocal<Map<DBReader, Exception>> threadLocalOpenResultSets = new ThreadLocal<Map<DBReader, Exception>>();

    // the context
    protected final DBContext context;

    // Object references (populated by init() when a query is opened, cleared by close())
    private DBDatabase db = null;
    private DBColumnExpr[] columns = null;
    private ResultSet rset = null;
    private DBMSHandler dbms = null;

    // the field index map (cache of column-expression -> result index lookups; null if disabled)
    private Map<ColumnExpr, Integer> fieldIndexMap = null;

    /**
     * Constructs an empty DBRecordSet object.
     * @param context the database context
     * @param useFieldIndexMap flag whether to use a fieldIndexMap
     */
    public DBReader(DBContext context, boolean useFieldIndexMap)
    {
        this.context = context;
        if (useFieldIndexMap)
            fieldIndexMap = new HashMap<ColumnExpr, Integer>();
    }

    /**
     * Constructs a default DBReader object with the fieldIndexMap enabled.
     * @param context the database context
     */
    public DBReader(DBContext context)
    {
        // Simple Constructor
        this(context, true);
    }

    /**
     * Returns the current Context
     * @return the database context
     */
    @Override
    public DBContext getContext()
    {
        return context;
    }

    /**
     * Returns the current DBDatabase object.
     * Only valid while the reader is open; null otherwise.
     *
     * @return the current DBDatabase object
     */
    @SuppressWarnings("unchecked")
    @Override
    public final DBDatabase getDatabase()
    {
        return db;
    }

    /**
     * Checks whether the underlying JDBC ResultSet supports scrolling.
     * @return true if the ResultSet is open and not forward-only
     */
    public boolean getScrollable()
    {
        try
        {
            // Check Resultset
            return (rset!=null && rset.getType()!=ResultSet.TYPE_FORWARD_ONLY);
        } catch (SQLException e) {
            log.error("Cannot determine Resultset type", e);
            return false;
        }
    }

    /**
     * Returns the index value by a specified DBColumnExpr object.
     * Results are cached in fieldIndexMap when that map is enabled.
     *
     * @return the index value
     */
    @Override
    public int getFieldIndex(ColumnExpr column)
    {
        if (fieldIndexMap==null)
            return findFieldIndex(column);
        // Use fieldIndexMap
        Integer index = fieldIndexMap.get(column);
        if (index==null)
        {   // add to field Index map
            index = findFieldIndex(column);
            fieldIndexMap.put(column, index);
        }
        return index;
    }

    /** Get the column Expression at position */
    @Override
    public DBColumnExpr getColumn(int iColumn)
    {
        if (columns == null || iColumn < 0 || iColumn >= columns.length)
            return null; // Index out of range
        // return column Expression
        return columns[iColumn];
    }

    /**
     * Returns the index value by a specified column name.
     * The comparison is case-insensitive.
     *
     * @param column the column name
     * @return the index value
     */
    @Override
    public int getFieldIndex(String column)
    {
        if (columns != null)
        {
            for (int i = 0; i < columns.length; i++)
                if (columns[i].getName().equalsIgnoreCase(column))
                    return i;
        }
        // not found
        return -1;
    }

    /**
     * Checks whether a column value is null. Unlike the base
     * class implementation, this class directly checks the value from the
     * resultset.
     *
     * @param index index of the column
     * @return true if the value is null or false otherwise
     */
    @Override
    public boolean isNull(int index)
    {
        if (index < 0 || index >= columns.length)
        {   // Index out of range
            log.warn("Index {} is out of range", index);
            throw new InvalidArgumentException("index", index);
        }
        try
        {   // Check Value on Resultset
            rset.getObject(index + 1);
            return rset.wasNull();
        } catch (Exception e) {
            // On any JDBC failure fall back to the base-class check instead of failing
            log.error("isNullValue exception", e);
            return super.isNull(index);
        }
    }

    /**
     * Returns a data value identified by the column index.
     * The value is read through the DBMS handler so dbms-specific
     * type conversion applies.
     *
     * @param index index of the column
     * @return the value
     */
    @Override
    public Object getValue(int index)
    {
        // Check params
        if (index < 0 || index >= columns.length)
            throw new InvalidArgumentException("index", index);
        try
        {   // Get Value from Resultset
            DataType dataType = columns[index].getDataType();
            return dbms.getResultValue(rset, index + 1, dataType);
        } catch (SQLException e) {
            // Operation failed
            throw new EmpireSQLException(context.getDbms(), e);
        }
    }

    /**
     * Returns the record key for a type of entity
     * @param entity the entity type or rowset for which to get key
     * @return the record key
     */
    public Object[] getRecordKey(Entity entity)
    {
        Column[] keyColumns = entity.getKeyColumns();
        if (keyColumns==null || keyColumns.length==0)
            throw new NoPrimaryKeyException(entity);
        // Collect key
        Object[] key = new Object[keyColumns.length];
        for (int i=0; i<key.length; i++)
            key[i] = this.get(keyColumns[i]);
        return key;
    }

    /**
     * Returns the record id for a type of entity which has a single numeric primary key
     * @param entity the entity type or rowset for which to get key
     * @return the record id
     * @throws InvalidArgumentException if the entity has not a single numeric primary key
     */
    public long getRecordId(Entity entity)
    {
        Column[] keyColumns = entity.getKeyColumns();
        if (keyColumns==null || keyColumns.length!=1)
            throw new InvalidArgumentException("entity", entity.getEntityName());
        // return id
        return this.getLong(keyColumns[0]);
    }

    /**
     * Checks if the rowset is open
     *
     * @return true if the rowset is open
     */
    public boolean isOpen()
    {
        return (rset != null);
    }

    /**
     * Opens the reader by executing the given SQL command.<BR>
     * After the reader is open, the reader's position is before the first record.<BR>
     * Use moveNext or iterator() to step through the rows.<BR>
     * Data of the current row can be accessed through the functions on the RecordData interface.<BR>
     * <P>
     * ATTENTION: After using the reader it must be closed using the close() method!<BR>
     * Use <PRE>try { ... } finally { reader.close(); } </PRE> to make sure the reader is closed.<BR>
     * <P>
     * @param cmd the SQL-Command with cmd.getSelect()
     * @param scrollable true if the reader should be scrollable or false if not
     */
    public void open(DBCommandExpr cmd, boolean scrollable)
    {
        // Re-opening an already open reader closes the previous ResultSet first
        if (isOpen())
            close();
        // Get the query statement
        String sqlCmd = cmd.getSelect();
        Object[] paramValues = cmd.getParamValues();
        // Collect the query parameters
        /*
        List<Object> subqueryParamValues = (cmd instanceof DBCommand) ? findSubQueryParams((DBCommand)cmd) : null;
        if (subqueryParamValues!=null && !subqueryParamValues.isEmpty())
        {   // Check Count
            if (paramValues==null)
            {   // use subquery params
                paramValues = subqueryParamValues.toArray();
            }
            else if (paramValues.length!=subqueryParamValues.size())
            {   // number of params do not match
                String msg = MessageFormat.format("Invalid number of parameters query: provided={0}, required={1}; query="+cmd.getSelect(), paramValues.length, subqueryParamValues.size());
                throw new UnspecifiedErrorException(msg);
            }
        }
        */
        // Execute the query
        DBUtils utils = context.getUtils();
        ResultSet queryRset = utils.executeQuery(sqlCmd, paramValues, scrollable);
        if (queryRset==null)
            throw new QueryNoResultException(sqlCmd);
        // init
        init(cmd.getDatabase(), cmd.getSelectExprList(), queryRset);
    }

    /**
     * Opens the reader by executing the given SQL command.<BR>
     * <P>
     * see {@link DBReader#open(DBCommandExpr, boolean)}
     * </P>
     * @param cmd the SQL-Command with cmd.getSelect()
     */
    public final void open(DBCommandExpr cmd)
    {
        open(cmd, false);
    }

    /**
     * <P>
     * Opens the reader by executing the given SQL command and moves to the first row.<BR>
     * If true is returned data of the row can be accessed through the functions on the RecordData interface.<BR>
     * This function is intended for single row queries and provided for convenience.<BR>
     * However it behaves exactly as calling reader.open() and reader.moveNext()<BR>
     * <P>
     * ATTENTION: After using the reader it must be closed using the close() method!<BR>
     * Use <PRE>try { ... } finally { reader.close(); } </PRE> to make sure the reader is closed.<BR>
     * <P>
     * @param cmd the SQL-Command with cmd.getSelect()
     */
    public void getRecordData(DBCommandExpr cmd)
    {   // Open the record
        open(cmd);
        // Get First Record
        if (!moveNext())
        {   // Close
            // NOTE(review): moveNext() already closed the reader on the empty result,
            // so only the exception is raised here
            throw new QueryNoResultException(cmd.getSelect());
        }
    }

    /**
     * Closes the DBRecordSet object, the Statement object and detach the columns.<BR>
     * A reader must always be closed immediately after using it.
     * Safe to call more than once; any failure during close is logged and swallowed.
     */
    @Override
    public void close()
    {
        try
        {   // Dispose iterator
            if (iterator != null)
            {
                iterator.dispose();
                iterator = null;
            }
            // Close JDBC-Resultset
            if (rset != null)
            {   // call dbms
                context.getDbms().closeResultSet(rset);
                // remove from tracking-list
                endTrackingThisResultSet();
            }
            // Detach columns
            columns = null;
            rset = null;
            dbms = null;
            // clear FieldIndexMap
            if (fieldIndexMap!=null)
                fieldIndexMap.clear();
            // Done
        } catch (Exception e) {
            // What's wrong here?
            log.warn(e.toString());
        }
    }

    /**
     * Moves the cursor down the given number of rows.
     * On a forward-only cursor only non-negative counts are allowed and the
     * rows are skipped one by one via moveNext(); on a scrollable cursor
     * ResultSet.relative() is used in either direction.
     *
     * @param count the number of rows to skip
     *
     * @return true if the reader is on a valid record or false otherwise
     */
    public boolean skipRows(int count)
    {
        try
        {   // Check Recordset
            if (rset == null)
                throw new ObjectNotValidException(this);
            // Forward only cursor?
            int type = rset.getType();
            if (type == ResultSet.TYPE_FORWARD_ONLY)
            {
                if (count < 0)
                    throw new InvalidArgumentException("count", count);
                // Move
                for (; count > 0; count--)
                {
                    if (!moveNext())
                        return false;
                }
                return true;
            }
            // Scrollable Cursor
            if (count > 0)
            {   // Move a single record first
                if (rset.next() == false)
                    return false;
                // Move relative
                if (count > 1)
                    return rset.relative(count - 1);
            }
            else if (count < 0)
            {   // Move a single record first
                if (rset.previous() == false)
                    return false;
                // Move relative
                if (count < -1)
                    return rset.relative(count + 1);
            }
            return true;
        } catch (SQLException e) {
            // an error occurred
            throw new EmpireSQLException(context.getDbms(), e);
        }
    }

    /**
     * Moves the cursor down one row from its current position.
     * When the end of the ResultSet is reached the reader is closed
     * automatically (and its fields are detached).
     *
     * @return true if the reader is on a valid record or false otherwise
     */
    public boolean moveNext()
    {
        try
        {   // Check Recordset
            if (rset == null)
                throw new ObjectNotValidException(this);
            // Move Next
            if (rset.next() == false)
            {   // Close recordset automatically after last record
                close();
                return false;
            }
            return true;
        } catch (SQLException e) {
            // an error occurred
            throw new EmpireSQLException(context.getDbms(), e);
        }
    }

    private DBReaderIterator iterator = null; // there can only be one!

    /**
     * Returns an row iterator for this reader.<BR>
     * There can only be one iterator at a time.
     * Subsequent calls return the same iterator instance, ignoring maxCount.
     * <P>
     * @param maxCount the maximum number of item that should be returned by this iterator
     * @return the row iterator
     */
    public Iterator<DBRecordData> iterator(int maxCount)
    {
        if (iterator == null && rset != null)
        {
            if (getScrollable())
                iterator = new DBReaderScrollableIterator(maxCount);
            else
                iterator = new DBReaderForwardIterator(maxCount);
        }
        return iterator;
    }

    /**
     * <PRE>
     * Returns an row iterator for this reader.
     * There can only be one iterator at a time.
     * </PRE>
     * @return the row iterator
     */
    public final Iterator<DBRecordData> iterator()
    {
        return iterator(-1);
    }

    /**
     * <PRE>
     * initializes a DBRecord object with the values of the current row.
     * At least all primary key columns of the target rowset must be provided by this reader.
     * This function is equivalent to calling rowset.initRecord(rec, reader)
     * set also {@link DBRowSet#initRecord(DBRecordBase, DBRecordData)});
     * </PRE>
     * @param rec the record which to initialize
     */
    public void initRecord(DBRecordBase rec)
    {
        // Check Open
        if (!isOpen())
        {   // Resultset not available
            throw new ObjectNotValidException(this);
        }
        // init Record
        DBRowSet rowset = rec.getRowSet();
        rowset.initRecord(rec, this);
    }

    /**
     * Returns the result of a query as a list of objects restricted
     * to a maximum number of objects (unless maxCount is -1).
     * Beans are created either through a matching constructor (one argument
     * per query column) or, if none exists, via the no-arg constructor plus
     * property setters.
     *
     * @param <L> the list type
     * @param <T> the list item type
     *
     * @param list the collection to add the objects to
     * @param t the class type of the objects in the list
     * @param parent the bean parent
     * @param maxCount the maximum number of objects
     *
     * @return the list of T
     */
    @SuppressWarnings("unchecked")
    public <L extends List<T>, T> L getBeanList(L list, Class<T> t, Object parent, int maxCount)
    {
        // Check Open
        if (!isOpen())
        {   // Resultset not available
            throw new ObjectNotValidException(this);
        }
        // Query List
        try
        {   // Find Constructor
            Constructor<?> ctor = findBeanConstructor(t);
            Object[] args = (ctor!=null) ? new Object[getFieldCount()] : null;
            Class<?>[] ctorParamTypes = (ctor!=null) ? ctor.getParameterTypes() : null;
            // Create a list of beans
            int rownum = 0;
            while (moveNext() && maxCount != 0)
            {   // Create bean and init
                T bean;
                if (ctor!=null)
                {   // Use Constructor
                    for (int i = 0; i < getFieldCount(); i++)
                        args[i] = ObjectUtils.convert(ctorParamTypes[i], getValue(i));
                    bean = (T)ctor.newInstance(args);
                }
                else
                {   // Use Property Setters
                    bean = t.newInstance();
                    setBeanProperties(bean);
                }
                // add
                list.add(bean);
                rownum++;
                // post processing
                if (bean instanceof DataBean<?>)
                    ((DataBean<?>)bean).initialize(((DBObject)this).getDatabase(), context, rownum, parent);
                // Decrease count (a negative maxCount never reaches 0, i.e. unlimited)
                if (maxCount > 0)
                    maxCount--;
            }
            // done
            return list;
        } catch (InvocationTargetException | IllegalAccessException | InstantiationException e) {
            // ReflectiveOperationException
            throw new BeanInstantiationException(t, e);
        }
    }

    /**
     * Returns the result of a query as a list of objects.
     * @param <T> the list item type
     * @param t the class type of the objects in the list
     * @param maxItems the maximum number of objects
     * @return the list of T
     */
    public final <T> List<T> getBeanList(Class<T> t, int maxItems)
    {
        return getBeanList(new ArrayList<T>(), t, null, maxItems);
    }

    /**
     * Returns the result of a query as a list of objects.
     * @param <T> the list item type
     * @param t the class type of the objects in the list
     * @return the list of T
     */
    public final <T> List<T> getBeanList(Class<T> t)
    {
        return getBeanList(t, -1);
    }

    /**
     * Adds the XML meta description of all query columns to the parent element.
     *
     * @return the number of column descriptions added to the Element
     */
    @Override
    public int addXmlMeta(Element parent)
    {
        if (columns == null)
            throw new ObjectNotValidException(this);
        // Add Field Description
        for (int i = 0; i < columns.length; i++)
            columns[i].addXml(parent, 0);
        // return count
        return columns.length;
    }

    /**
     * Adds all children to a parent.
     * A column literally named "id" (case-insensitive) becomes a row attribute,
     * every other column becomes a child element; null values are marked with
     * a null="yes" attribute.
     *
     * @param parent the parent element below which to search the child
     * @return the number of row values added to the element
     */
    @Override
    public int addXmlData(Element parent)
    {
        if (rset == null)
            throw new ObjectNotValidException(this);
        // Add all children
        for (int i = 0; i < columns.length; i++)
        {   // Read all
            String name = columns[i].getName();
            String idColumnAttr = getXmlDictionary().getRowIdColumnAttribute();
            if (name.equalsIgnoreCase("id"))
            {   // Add Attribute
                parent.setAttribute(idColumnAttr, getString(i));
            }
            else
            {   // Add Element
                String value = getString(i);
                Element elem = XMLUtil.addElement(parent, name, value);
                if (value == null)
                    elem.setAttribute("null", "yes"); // Null-Value
            }
        }
        // return count
        return columns.length;
    }

    /**
     * Adds all children to a parent.
     * Consumes the remaining rows of this reader (moveNext() until exhausted).
     *
     * @param parent the parent element below which to search the child
     * @return the number of rows added to the element
     */
    public int addRows(Element parent)
    {
        int count = 0;
        if (rset == null)
            return 0;
        // Add all rows
        String rowElementName = getXmlDictionary().getRowElementName();
        while (moveNext())
        {
            addXmlData(XMLUtil.addElement(parent, rowElementName));
            count++;
        }
        return count;
    }

    /**
     * returns the DBXmlDictionary that should used to generate XMLDocuments<BR>
     * @return the DBXmlDictionary
     */
    protected DBXmlDictionary getXmlDictionary()
    {
        return DBXmlDictionary.getInstance();
    }

    /**
     * Returns a XML document with the field description an values of this record.
     *
     * @return the new XML Document object
     */
    @Override
    public Document getXmlDocument()
    {
        if (rset == null)
            return null;
        // Create Document
        String rowsetElementName = getXmlDictionary().getRowSetElementName();
        Element root = XMLUtil.createDocument(rowsetElementName);
        // Add Field Description
        addXmlMeta(root);
        // Add row rset
        addRows(root);
        // return Document
        return root.getOwnerDocument();
    }

    /** returns the number of the elements of the colList array */
    @Override
    public int getFieldCount()
    {
        return (columns != null) ? columns.length : 0;
    }

    /**
     * Initialize the reader from an open JDBC-ResultSet
     * @param db the database
     * @param columns the query column expressions
     * @param rset the JDBC-ResultSet
     */
    protected void init(DBDatabase db, DBColumnExpr[] columns, ResultSet rset)
    {
        this.db = db;
        this.dbms = db.getDbms();
        this.columns = columns;
        this.rset = rset;
        // clear fieldIndexMap
        if (fieldIndexMap!=null)
            fieldIndexMap.clear();
        // add to tracking list (if enabled)
        trackThisResultSet();
    }

    /**
     * Access the column expression list
     * @return the column expression list
     */
    protected final DBColumnExpr[] getColumnExprList()
    {
        return columns;
    }

    /**
     * Access the JDBC-ResultSet
     * @return the JDBC-ResultSet
     */
    protected final ResultSet getResultSet()
    {
        return rset;
    }

    /**
     * finds the field Index of a given column expression
     * Internally used as helper for getFieldIndex()
     * @param column the column to find
     * @return the index value
     */
    protected int findFieldIndex(ColumnExpr column)
    {
        if (columns == null)
            throw new ObjectNotValidException(this);
        // First chance: Try to find an expression match
        int index = ObjectUtils.indexOf(columns, column);
        if (index>= 0)
            return index;
        // Second chance: Try Update Column
        if (column instanceof DBColumn)
        {
            for (int i = 0; i < columns.length; i++)
            {
                DBColumn updColumn = columns[i].getUpdateColumn();
                if (updColumn!=null && updColumn.equals(column))
                    return i;
                // Query Expression?
                if (updColumn instanceof DBQueryColumn)
                {
                    updColumn = ((DBQueryColumn)updColumn).getExpr().getUpdateColumn();
                    if (updColumn!=null && updColumn.equals(column))
                        return i;
                }
            }
        }
        // not found!
        return -1;
    }

    /**
     * internal helper function to find parameterized subqueries
     * (commented-out code, kept for reference)
     * @param cmd the command
     * @return a list of parameter arrays, one for each subquery
    protected List<Object> findSubQueryParams(DBCommand cmd)
    {
        List<Object> subQueryParams = null;
        List<DBJoinExpr> joins = cmd.getJoins();
        if (joins==null)
            return null; // no joins
        // check the joins
        for (DBJoinExpr j : joins)
        {
            DBRowSet rsl = j.getLeftTable();
            DBRowSet rsr = j.getRightTable();
            if (rsl instanceof DBQuery)
            {   // the left join is a query
                subQueryParams = addSubQueryParams((DBQuery)rsl, subQueryParams);
            }
            if (rsr instanceof DBQuery)
            {   // the right join is a query
                subQueryParams = addSubQueryParams((DBQuery)rsr, subQueryParams);
            }
        }
        return subQueryParams;
    }
    */

    /**
     * Adds any subquery params to the supplied list
     * (commented-out code, kept for reference)
     * @param query the subquery
     * @param list the current list of parameters
     * @return the new list of parameters
    private List<Object> addSubQueryParams(DBQuery query, List<Object> list)
    {
        DBCommandExpr sqcmd = query.getCommandExpr();
        Object[] params = query.getCommandExpr().getParamValues();
        if (params!=null && params.length>0)
        {   // add params
            if (list== null)
                list = new ArrayList<Object>();
            for (Object p : params)
                list.add(p);
        }
        // recurse
        if (sqcmd instanceof DBCommand)
        {   // check this command too
            List<Object> sqlist = findSubQueryParams((DBCommand)sqcmd);
            if (sqlist!=null && !sqlist.isEmpty())
            {   // make one list
                if (list!= null)
                    list.addAll(sqlist);
                else
                    list = sqlist;
            }
        }
        return list;
    }
    */

    /**
     * Returns a constructor for a bean class for the set of parameters or null if no suitable constructor is found
     * @param beanClass the bean class
     * @return a constructor for the readers columns or null if not suitable constructor is available
     */
    protected Constructor<?> findBeanConstructor(Class<?> beanClass)
    {
        // Check whether we can use a constructor
        Class<?>[] paramTypes = new Class[getFieldCount()];
        for (int i = 0; i < columns.length; i++)
            paramTypes[i] = columns[i].getJavaType();
        // Find Constructor
        Constructor<?> ctor = ClassUtils.findMatchingConstructor(beanClass, -1, paramTypes);
        return ctor;
    }

    /**
     * Support for finding code errors where a DBRecordSet is opened but not closed.
     * Records the current stack trace per reader in a thread-local map.
     *
     * @author bond
     */
    protected synchronized void trackThisResultSet()
    {
        // check if enabled
        if (trackOpenResultSets==false)
            return;
        // add this to the vector of open resultsets on this thread
        Map<DBReader, Exception> openResultSets = threadLocalOpenResultSets.get();
        if (openResultSets == null)
        {   // Lazy initialization of the thread-local map
            openResultSets = new HashMap<DBReader, Exception>(2);
            threadLocalOpenResultSets.set(openResultSets);
        }
        Exception stackException = openResultSets.get(this);
        if (stackException != null)
        {
            log.error("DBRecordSet.addOpenResultSet called for an object which is already in the open list. This is the stack of the method opening the object which was not previously closed.", stackException);
            // the code continues and overwrites the logged object with the new one
        }
        // get the current stack trace
        openResultSets.put(this, new Exception());
    }

    /**
     * Support for finding code errors where a DBRecordSet is opened but not closed.
     * NOTE(review): assumes trackThisResultSet() ran for this reader while tracking
     * was enabled; if tracking was switched on in between, the thread-local map may
     * still be null here and cause an NPE — confirm against intended usage.
     *
     * @author bond
     */
    protected synchronized void endTrackingThisResultSet()
    {
        // check if enabled
        if (trackOpenResultSets==false)
            return;
        // remove
        Map<DBReader, Exception> openResultSets = threadLocalOpenResultSets.get();
        if (openResultSets.containsKey(this) == false)
        {
            log.error("DBRecordSet.removeOpenResultSet called for an object which is not in the open list. Here is the current stack.", new Exception());
        }
        else
        {
            openResultSets.remove(this);
        }
    }

    /**
     * Enables or disabled tracking of open ResultSets
     * @param enable true to enable or false otherwise
     * @return the previous state of the trackOpenResultSets
     */
    public static synchronized boolean enableOpenResultSetTracking(boolean enable)
    {
        boolean prev = trackOpenResultSets;
        trackOpenResultSets = enable;
        return prev;
    }

    /**
     * <PRE>
     * Call this if you want to check whether there are any unclosed resultsets
     * It logs stack traces to help find piece of code
     * where a DBReader was opened but not closed.
     * </PRE>
     * Only inspects the current thread's map; the map is cleared afterwards.
     */
    public static synchronized void checkOpenResultSets()
    {
        // check if enabled
        if (trackOpenResultSets==false)
            throw new InvalidOperationException("Open-ResultSet-Tracking has not been enabled. Use DBReader.enableOpenResultSetTracking() to enable or disable.");
        // Check map
        Map<DBReader, Exception> openResultSets = threadLocalOpenResultSets.get();
        if (openResultSets != null && openResultSets.isEmpty() == false)
        {
            // we have found a(n) open result set(s). Now show the stack trace(s)
            Object keySet[] = openResultSets.keySet().toArray();
            for (int i = 0; i < keySet.length; i++)
            {
                Exception stackException = openResultSets.get(keySet[i]);
                log.error("A DBReader was not closed. Stack of opening code is ", stackException);
            }
            openResultSets.clear();
        }
    }
}
oracle/graal
37,028
truffle/src/com.oracle.truffle.api/src/com/oracle/truffle/api/ArrayUtils.java
/* * Copyright (c) 2018, 2022, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * The Universal Permissive License (UPL), Version 1.0 * * Subject to the condition set forth below, permission is hereby granted to any * person obtaining a copy of this software, associated documentation and/or * data (collectively the "Software"), free of charge and under any and all * copyright rights in the Software, and any and all patent rights owned or * freely licensable by each licensor hereunder covering either (i) the * unmodified Software as contributed to or provided by such licensor, or (ii) * the Larger Works (as defined below), to deal in both * * (a) the Software, and * * (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if * one is included with the Software each a "Larger Work" to which the Software * is contributed by such licensors), * * without restriction, including without limitation the rights to copy, create * derivative works of, display, perform, and distribute the Software and make, * use, sell, offer for sale, import, export, have made, and have sold the * Software and the Larger Work(s), and to sublicense the foregoing rights on * either these or other terms. * * This license is subject to the following condition: * * The above copyright notice and either this complete permission notice or at a * minimum a reference to the UPL must be included in all copies or substantial * portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.oracle.truffle.api; import java.lang.reflect.Field; import java.nio.ByteOrder; import com.oracle.truffle.api.CompilerDirectives.CompilationFinal; import sun.misc.Unsafe; /** * This class provides additional operations for {@link String} as well as character and byte * arrays, which may be intrinsified by a compiler. * * @since 19.0 */ public final class ArrayUtils { private ArrayUtils() { } @SuppressWarnings("deprecation"/* JDK-8277863 */) static long getObjectFieldOffset(Field field) { return UNSAFE.objectFieldOffset(field); } private static final sun.misc.Unsafe UNSAFE = getUnsafe(); private static final long javaStringValueFieldOffset; private static final long javaStringCoderFieldOffset; static { Field valueField = getStringDeclaredField("value"); javaStringValueFieldOffset = getObjectFieldOffset(valueField); Field coderField = getStringDeclaredField("coder"); javaStringCoderFieldOffset = getObjectFieldOffset(coderField); } private static Field getStringDeclaredField(String name) { try { return String.class.getDeclaredField(name); } catch (NoSuchFieldException e) { throw new RuntimeException("failed to get " + name + " field offset", e); } } private static Unsafe getUnsafe() { try { return Unsafe.getUnsafe(); } catch (SecurityException e1) { try { Field theUnsafeInstance = Unsafe.class.getDeclaredField("theUnsafe"); theUnsafeInstance.setAccessible(true); return (Unsafe) theUnsafeInstance.get(Unsafe.class); } catch (Exception e2) { throw new RuntimeException("exception while trying to get Unsafe.theUnsafe via reflection:", e2); } } } private static byte[] getJavaStringArray(String str) { Object value = UNSAFE.getObject(str, javaStringValueFieldOffset); assert value instanceof byte[]; 
return (byte[]) value; } private static boolean isCompactString(String s) { return UNSAFE.getByte(s, javaStringCoderFieldOffset) == 0; } /** * Returns the index of the first occurrence of any character contained in {@code values} in * {@code string}, bounded by {@code fromIndex} (inclusive) and {@code maxIndex} (exclusive). * * @return the index of the first occurrence of any character contained in {@code values} in * {@code string} that is greater than or equal to {@code fromIndex} and less than * {@code maxIndex}, or {@code -1} if none of the characters occur. * @since 19.0 */ public static int indexOf(String string, int fromIndex, int maxIndex, char... values) { checkArgs(string.length(), fromIndex, maxIndex, values.length); if (fromIndex >= string.length()) { return -1; } if (values.length <= 4) { if (isCompactString(string)) { int valuesInt = 0; int nValues = 0; for (int i = 0; i < values.length; i++) { if (values[i] <= 0xff) { valuesInt = (valuesInt << 8) | values[i]; nValues++; } } return nValues == 0 ? -1 : indexOfBS1(getJavaStringArray(string), fromIndex, maxIndex, valuesInt, nValues); } else { return indexOfBS2(getJavaStringArray(string), fromIndex, maxIndex, values, values.length); } } return runIndexOfS2(string, fromIndex, maxIndex, values); } /** * Returns the index of the first occurrence of any character contained in {@code values} in * {@code array}, bounded by {@code fromIndex} (inclusive) and {@code maxIndex} (exclusive). * * @return the index of the first occurrence of any character contained in {@code values} in * {@code array} that is greater than or equal to {@code fromIndex} and less than * {@code maxIndex}, or {@code -1} if none of the characters occur. * @since 19.0 */ public static int indexOf(char[] array, int fromIndex, int maxIndex, char... 
values) { checkArgs(array.length, fromIndex, maxIndex, values.length); if (fromIndex >= array.length) { return -1; } if (values.length <= 4) { return indexOfCS2(array, fromIndex, maxIndex, values, values.length); } return runIndexOfS2(array, fromIndex, maxIndex, values); } /** * Returns the index of the first occurrence of any byte contained in {@code values} in * {@code array}, bounded by {@code fromIndex} (inclusive) and {@code maxIndex} (exclusive). * * @return the index of the first occurrence of any byte contained in {@code values} in * {@code array} that is greater than or equal to {@code fromIndex} and less than * {@code maxIndex}, or {@code -1} if none of the values occur. * @since 19.0 */ public static int indexOf(byte[] array, int fromIndex, int maxIndex, byte... values) { checkArgs(array.length, fromIndex, maxIndex, values.length); if (fromIndex >= array.length) { return -1; } if (values.length <= 4) { return indexOfBS1(array, fromIndex, maxIndex, values, values.length); } return runIndexOfS1(array, fromIndex, maxIndex, values); } /** * Returns the index of the first region of {@code haystack} that equals {@code needle} after * being OR'ed with {@code mask}, bounded by {@code fromIndex} (inclusive) and {@code length}. * Performs best if {@code needle} and {@code mask} are {@link CompilationFinal} with * {@code dimensions = 1}. * * @return the index of the first region of {@code haystack} where for all indices {@code i} of * {@code needle} {@code (haystack[index + i] | mask[i]) == needle[i]} holds, and * {@code fromIndex <= index && index + needle.length <= fromIndex + length} holds, or * {@code -1} if no such region is found. 
* @since 19.3 */ public static int indexOfWithOrMask(byte[] haystack, int fromIndex, int length, byte[] needle, byte[] mask) { checkArgsIndexOf(haystack.length, fromIndex, length); if (mask != null) { checkMaskLengthIndexOf(needle.length, mask.length); } if (needle.length == 0) { return fromIndex; } if (length - needle.length < 0) { return -1; } else if (needle.length == 1) { if (mask == null) { return stubIndexOfB1S1(haystack, fromIndex, fromIndex + length, Byte.toUnsignedInt(needle[0])); } return stubIndexOfWithOrMaskS1(haystack, fromIndex, fromIndex + length, Byte.toUnsignedInt(needle[0]), Byte.toUnsignedInt(mask[0])); } else { int max = fromIndex + length - (needle.length - 2); int index = fromIndex; if (mask == null) { while (index < max - 1) { index = stubIndexOf2ConsecutiveS1(haystack, index, max, Byte.toUnsignedInt(needle[0]), Byte.toUnsignedInt(needle[1])); if (index < 0) { return -1; } if (needle.length == 2 || stubRegionEqualsS1(haystack, index, needle, 0, needle.length)) { return index; } index++; } } else { while (index < max - 1) { index = stubIndexOf2ConsecutiveWithOrMaskS1(haystack, index, max, Byte.toUnsignedInt(needle[0]), Byte.toUnsignedInt(needle[1]), Byte.toUnsignedInt(mask[0]), Byte.toUnsignedInt(mask[1])); if (index < 0) { return -1; } if (needle.length == 2 || stubRegionEqualsWithOrMaskS1(haystack, index, needle, 0, mask, mask.length)) { return index; } index++; } } return -1; } } /** * Returns the index of the first region of {@code haystack} that equals {@code needle} after * being OR'ed with {@code mask}, bounded by {@code fromIndex} (inclusive) and {@code length}. * Performs best if {@code needle} and {@code mask} are {@link CompilationFinal} with * {@code dimensions = 1}. 
* * @return the index of the first region of {@code haystack} where for all indices {@code i} of * {@code needle} {@code (haystack[index + i] | mask[i]) == needle[i]} holds, and * {@code fromIndex <= index && index + needle.length <= fromIndex + length} holds, or * {@code -1} if no such region is found. * @since 19.3 */ public static int indexOfWithOrMask(char[] haystack, int fromIndex, int length, char[] needle, char[] mask) { checkArgsIndexOf(haystack.length, fromIndex, length); if (mask != null) { checkMaskLengthIndexOf(needle.length, mask.length); } if (needle.length == 0) { return fromIndex; } if (length - needle.length < 0) { return -1; } else if (needle.length == 1) { if (mask == null) { return stubIndexOfC1S2(haystack, fromIndex, fromIndex + length, needle[0]); } return stubIndexOfWithOrMaskS2(haystack, fromIndex, fromIndex + length, needle[0], mask[0]); } else { int max = fromIndex + length - (needle.length - 2); int index = fromIndex; if (mask == null) { while (index < max - 1) { index = stubIndexOf2ConsecutiveS2(haystack, index, max, needle[0], needle[1]); if (index < 0) { return -1; } if (needle.length == 2 || stubRegionEqualsS2(haystack, index, needle, 0, needle.length)) { return index; } index++; } } else { while (index < max - 1) { index = stubIndexOf2ConsecutiveWithOrMaskS2(haystack, index, max, needle[0], needle[1], mask[0], mask[1]); if (index < 0) { return -1; } if (needle.length == 2 || stubRegionEqualsWithOrMaskS2(haystack, index, needle, 0, mask, mask.length)) { return index; } index++; } } return -1; } } /** * Returns the index of the first region of {@code haystack} that equals {@code needle} after * being OR'ed with {@code mask}, bounded by {@code fromIndex} (inclusive) and {@code length}. * Performs best if {@code needle} and {@code mask} are {@link CompilationFinal}. 
* * @return the index of the first region of {@code haystack} where for all indices {@code i} of * {@code needle} * {@code (haystack.charAt(index + i) | mask.charAt(i)) == needle.charAt(i)} holds, and * {@code fromIndex <= index && index + needle.length() <= fromIndex + length} holds, or * {@code -1} if no such region is found. * @since 19.3 */ public static int indexOfWithOrMask(String haystack, int fromIndex, int length, String needle, String mask) { checkArgsIndexOf(haystack.length(), fromIndex, length); if (mask != null) { checkMaskLengthIndexOf(needle.length(), mask.length()); } if (needle.isEmpty()) { return fromIndex; } if (length - needle.length() < 0) { return -1; } else if (needle.length() == 1) { return indexOfWithOrMaskJLString(haystack, fromIndex, length, needle, mask); } else { int max = fromIndex + length - (needle.length() - 2); int index = fromIndex; while (index < max - 1) { index = indexOf2ConsecutiveWithOrMaskJLString(haystack, index, needle, mask, max); if (index < 0) { return -1; } if (needle.length() == 2 || regionEqualsWithOrMask(haystack, index, needle, 0, needle.length(), mask)) { return index; } index++; } return -1; } } private static int indexOfWithOrMaskJLString(String haystack, int fromIndex, int length, String needle, String mask) { int maxIndex = fromIndex + length; int v0 = needle.charAt(0); byte[] array = getJavaStringArray(haystack); if (mask == null) { if (isCompactString(haystack)) { return v0 <= 0xff ? stubIndexOfB1S1(array, fromIndex, maxIndex, v0) : -1; } else { return stubIndexOfB1S2(array, fromIndex, maxIndex, v0); } } else { int mask0 = mask.charAt(0); if (isCompactString(haystack)) { return (v0 ^ mask0) <= 0xff ? 
stubIndexOfWithOrMaskS1(array, fromIndex, maxIndex, v0, mask0) : -1; } else { return stubIndexOfWithOrMaskS2(array, fromIndex, maxIndex, v0, mask0); } } } private static int indexOf2ConsecutiveWithOrMaskJLString(String haystack, int fromIndex, String needle, String mask, int max) { char v0 = needle.charAt(0); char v1 = needle.charAt(1); byte[] array = getJavaStringArray(haystack); if (mask == null) { if (isCompactString(haystack)) { return v0 <= 0xff && v1 <= 0xff ? stubIndexOf2ConsecutiveS1(array, fromIndex, max, v0, v1) : -1; } else { return stubIndexOf2ConsecutiveS2(array, fromIndex, max, v0, v1); } } else { char mask0 = mask.charAt(0); char mask1 = mask.charAt(1); if (isCompactString(haystack)) { return (v0 ^ mask0) <= 0xff && (v1 ^ mask1) <= 0xff ? stubIndexOf2ConsecutiveWithOrMaskS1(array, fromIndex, max, v0, v1, mask0, mask1) : -1; } else { return stubIndexOf2ConsecutiveWithOrMaskS2(array, fromIndex, max, v0, v1, mask0, mask1); } } } /** * Returns {@code true} iff for all indices {@code i} from {@code 0} (inclusive) to * {@code length} (exclusive), {@code (a[offsetA + i] | mask[i]) == b[offsetB + i]} holds. * Performs best if {@code length} and {@code mask} are {@link CompilationFinal} with * {@code dimensions = 1}. If {@code mask} is {@code null}, it is treated as if it was filled * with zeroes. 
* * @since 19.3 */ public static boolean regionEqualsWithOrMask(byte[] a, int offsetA, byte[] b, int offsetB, int length, byte[] mask) { requireNonNull(a); requireNonNull(b); checkArgsRegionEquals(offsetA, offsetB, length); if (regionEqualsOutOfBounds(a.length, offsetA, b.length, offsetB, length)) { return false; } if (mask == null) { return stubRegionEqualsS1(a, offsetA, b, offsetB, length); } checkMaskLengthRegionEquals(length, mask.length); return stubRegionEqualsWithOrMaskS1(a, offsetA, b, offsetB, mask, mask.length); } /** * Returns {@code true} iff for all indices {@code i} from {@code 0} (inclusive) to * {@code length} (exclusive), {@code (a[offsetA + i] | mask[i]) == b[offsetB + i]} holds. * Performs best if {@code length} and {@code mask} are {@link CompilationFinal} with * {@code dimensions = 1}. If {@code mask} is {@code null}, it is treated as if it was filled * with zeroes. * * @since 19.3 */ public static boolean regionEqualsWithOrMask(char[] a, int offsetA, char[] b, int offsetB, int length, char[] mask) { requireNonNull(a); requireNonNull(b); checkArgsRegionEquals(offsetA, offsetB, length); if (regionEqualsOutOfBounds(a.length, offsetA, b.length, offsetB, length)) { return false; } if (mask == null) { return stubRegionEqualsS2(a, offsetA, b, offsetB, length); } checkMaskLengthRegionEquals(length, mask.length); return stubRegionEqualsWithOrMaskS2(a, offsetA, b, offsetB, mask, mask.length); } /** * Returns {@code true} iff for all indices {@code i} from {@code 0} (inclusive) to * {@code length} (exclusive), * {@code (a.charAt(offsetA + i) | mask.charAt(i)) == b.charAt(offsetB + i)} holds. Performs * best if {@code length} and {@code mask} are {@link CompilationFinal} with * {@code dimensions = 1}. If {@code mask} is {@code null}, it is treated as if it was filled * with zeroes. 
*
 * @since 19.3
 */
public static boolean regionEqualsWithOrMask(String a, int offsetA, String b, int offsetB, int length, String mask) {
    requireNonNull(a);
    requireNonNull(b);
    checkArgsRegionEquals(offsetA, offsetB, length);
    if (regionEqualsOutOfBounds(a.length(), offsetA, b.length(), offsetB, length)) {
        // Out-of-bounds regions are reported as unequal rather than throwing.
        return false;
    }
    if (mask == null) {
        byte[] arrayA = getJavaStringArray(a);
        byte[] arrayB = getJavaStringArray(b);
        boolean compactA = isCompactString(a);
        if (compactA != isCompactString(b)) {
            // Mixed representation: compare the UTF-16 side against the latin-1 side.
            return stubRegionEqualsS2S1(
                            compactA ? arrayB : arrayA,
                            compactA ? offsetB : offsetA,
                            compactA ? arrayA : arrayB,
                            compactA ? offsetA : offsetB, length);
        } else {
            // Same representation: compare raw bytes; UTF-16 strings use two bytes per
            // char, so offsets and length are doubled.
            final int byteOffsetA;
            final int byteOffsetB;
            final int byteLength;
            if (compactA) {
                byteOffsetA = offsetA;
                byteOffsetB = offsetB;
                byteLength = length;
            } else {
                byteOffsetA = offsetA << 1;
                byteOffsetB = offsetB << 1;
                byteLength = length << 1;
            }
            return stubRegionEqualsS1(arrayA, byteOffsetA, arrayB, byteOffsetB, byteLength);
        }
    } else {
        checkMaskLengthRegionEquals(length, mask.length());
        byte[] arrayA = getJavaStringArray(a);
        byte[] arrayB = getJavaStringArray(b);
        byte[] arrayM = getJavaStringArray(mask);
        boolean compact1 = isCompactString(a);
        boolean compact2 = isCompactString(b);
        boolean compactMask = isCompactString(mask);
        // NOTE(review): the branches below are deliberately kept fully expanded so the
        // three compactness flags are constants at every call site — presumably to aid
        // partial evaluation; confirm before collapsing them.
        if (compact2) {
            if (compactMask) {
                if (compact1) {
                    return stubRegionEqualsWithOrMaskCompactStrings(arrayA, offsetA, arrayB, offsetB, arrayM, mask.length(), true, true, true);
                } else {
                    return stubRegionEqualsWithOrMaskCompactStrings(arrayA, offsetA, arrayB, offsetB, arrayM, mask.length(), false, true, true);
                }
            } else {
                // NOTE(review): a UTF-16 mask combined with a compact b is treated as an
                // unconditional mismatch — confirm this matches the intrinsic's contract.
                return false;
            }
        } else {
            if (compactMask) {
                if (compact1) {
                    return stubRegionEqualsWithOrMaskCompactStrings(arrayA, offsetA, arrayB, offsetB, arrayM, mask.length(), true, false, true);
                } else {
                    return stubRegionEqualsWithOrMaskCompactStrings(arrayA, offsetA, arrayB, offsetB, arrayM, mask.length(), false, false, true);
                }
            } else {
                if (compact1) {
                    return stubRegionEqualsWithOrMaskCompactStrings(arrayA, offsetA, arrayB, offsetB, arrayM, mask.length(), true, false, false);
                } else {
                    return stubRegionEqualsWithOrMaskCompactStrings(arrayA, offsetA, arrayB, offsetB, arrayM, mask.length(), false, false, false);
                }
            }
        }
    }
}

// True iff either region [offset, offset + length) does not fit inside its array/string.
private static boolean regionEqualsOutOfBounds(int lengthA, int offsetA, int lengthB, int offsetB, int length) {
    return lengthA - offsetA < length || lengthB - offsetB < length;
}

// Argument validation for regionEquals*; zero is accepted, so the message says non-negative.
// (Previously the message claimed "positive" although the check is only `< 0`.)
private static void checkArgsRegionEquals(int offsetA, int offsetB, int length) {
    if (offsetA < 0 || offsetB < 0 || length < 0) {
        illegalArgumentException("length, offsetA and offsetB must be non-negative");
    }
}

private static void checkMaskLengthRegionEquals(int length, int maskLength) {
    if (length > maskLength) {
        illegalArgumentException("mask length must be greater or equal to length");
    }
}

// Argument validation for the multi-value indexOf entry points.
private static void checkArgs(int length, int fromIndex, int maxIndex, int nValues) {
    if (fromIndex < 0) {
        // Fixed message: zero is accepted, the check is only `< 0`.
        illegalArgumentException("fromIndex must be non-negative");
    }
    if (maxIndex > length || maxIndex < fromIndex) {
        illegalArgumentException("maxIndex out of range");
    }
    if (nValues == 0) {
        illegalArgumentException("no search values provided");
    }
}

// Argument validation for the (fromIndex, length)-style indexOf entry points. The unsigned
// addition guards against int overflow of fromIndex + length.
private static void checkArgsIndexOf(int hayStackLength, int fromIndex, int length) {
    if (fromIndex < 0 || length < 0) {
        // Fixed message: zero is accepted, the check is only `< 0`.
        illegalArgumentException("fromIndex and length must be non-negative");
    }
    if (Integer.toUnsignedLong(fromIndex) + Integer.toUnsignedLong(length) > hayStackLength) {
        illegalArgumentException("length out of range");
    }
}

private static void checkMaskLengthIndexOf(int lengthB, int maskLength) {
    if (lengthB != maskLength) {
        illegalArgumentException("mask and needle length must be equal");
    }
}

// Null check that deoptimizes before throwing, keeping the throw out of compiled code.
private static void requireNonNull(Object obj) {
    if (obj == null) {
        CompilerDirectives.transferToInterpreterAndInvalidate();
        throw new NullPointerException();
    }
}

// Deoptimizes before throwing, keeping the exception path out of compiled code.
private static void illegalArgumentException(String msg) {
    CompilerDirectives.transferToInterpreterAndInvalidate();
    throw new
IllegalArgumentException(msg);
}

// Dispatchers: fan out to the fixed-arity stub matching the number of search values.

private static int indexOfBS1(byte[] array, int fromIndex, int maxIndex, int values, int nValues) {
    // `values` holds up to four search bytes packed into one int, lowest byte first.
    switch (nValues) {
        case 1:
            return stubIndexOfB1S1(array, fromIndex, maxIndex, values & 0xff);
        case 2:
            return stubIndexOfB2S1(array, fromIndex, maxIndex, values & 0xff, (values >>> 8) & 0xff);
        case 3:
            return stubIndexOfB3S1(array, fromIndex, maxIndex, values & 0xff, (values >>> 8) & 0xff, (values >>> 16) & 0xff);
        default:
            return stubIndexOfB4S1(array, fromIndex, maxIndex, values & 0xff, (values >>> 8) & 0xff, (values >>> 16) & 0xff, (values >>> 24) & 0xff);
    }
}

private static int indexOfBS1(byte[] array, int fromIndex, int maxIndex, byte[] bytes, int nValues) {
    switch (nValues) {
        case 1:
            return stubIndexOfB1S1(array, fromIndex, maxIndex, Byte.toUnsignedInt(bytes[0]));
        case 2:
            return stubIndexOfB2S1(array, fromIndex, maxIndex, Byte.toUnsignedInt(bytes[0]), Byte.toUnsignedInt(bytes[1]));
        case 3:
            return stubIndexOfB3S1(array, fromIndex, maxIndex, Byte.toUnsignedInt(bytes[0]), Byte.toUnsignedInt(bytes[1]), Byte.toUnsignedInt(bytes[2]));
        default:
            return stubIndexOfB4S1(array, fromIndex, maxIndex, Byte.toUnsignedInt(bytes[0]), Byte.toUnsignedInt(bytes[1]), Byte.toUnsignedInt(bytes[2]), Byte.toUnsignedInt(bytes[3]));
    }
}

private static int indexOfBS2(byte[] array, int fromIndex, int maxIndex, char[] chars, int nValues) {
    switch (nValues) {
        case 1:
            return stubIndexOfB1S2(array, fromIndex, maxIndex, chars[0]);
        case 2:
            return stubIndexOfB2S2(array, fromIndex, maxIndex, chars[0], chars[1]);
        case 3:
            return stubIndexOfB3S2(array, fromIndex, maxIndex, chars[0], chars[1], chars[2]);
        default:
            return stubIndexOfB4S2(array, fromIndex, maxIndex, chars[0], chars[1], chars[2], chars[3]);
    }
}

private static int indexOfCS2(char[] array, int fromIndex, int maxIndex, char[] chars, int nValues) {
    switch (nValues) {
        case 1:
            return stubIndexOfC1S2(array, fromIndex, maxIndex, chars[0]);
        case 2:
            return stubIndexOfC2S2(array, fromIndex, maxIndex, chars[0], chars[1]);
        case 3:
            return stubIndexOfC3S2(array, fromIndex, maxIndex, chars[0], chars[1], chars[2]);
        default:
            return stubIndexOfC4S2(array, fromIndex, maxIndex, chars[0], chars[1], chars[2], chars[3]);
    }
}

// Stub methods: plain Java fallbacks that simply delegate to the generic search loops.
// (S1 = one byte per element, S2 = two bytes per element.)

private static int stubIndexOfB1S1(byte[] array, int fromIndex, int maxIndex, int v1) {
    return runIndexOfS1(array, fromIndex, maxIndex, v1);
}

private static int stubIndexOfB2S1(byte[] array, int fromIndex, int maxIndex, int v1, int v2) {
    return runIndexOfS1(array, fromIndex, maxIndex, v1, v2);
}

private static int stubIndexOfB3S1(byte[] array, int fromIndex, int maxIndex, int v1, int v2, int v3) {
    return runIndexOfS1(array, fromIndex, maxIndex, v1, v2, v3);
}

private static int stubIndexOfB4S1(byte[] array, int fromIndex, int maxIndex, int v1, int v2, int v3, int v4) {
    return runIndexOfS1(array, fromIndex, maxIndex, v1, v2, v3, v4);
}

private static int stubIndexOfB1S2(byte[] array, int fromIndex, int maxIndex, int v1) {
    return runIndexOfS2(array, fromIndex, maxIndex, v1);
}

private static int stubIndexOfB2S2(byte[] array, int fromIndex, int maxIndex, int v1, int v2) {
    return runIndexOfS2(array, fromIndex, maxIndex, v1, v2);
}

private static int stubIndexOfB3S2(byte[] array, int fromIndex, int maxIndex, int v1, int v2, int v3) {
    return runIndexOfS2(array, fromIndex, maxIndex, v1, v2, v3);
}

private static int stubIndexOfB4S2(byte[] array, int fromIndex, int maxIndex, int v1, int v2, int v3, int v4) {
    return runIndexOfS2(array, fromIndex, maxIndex, v1, v2, v3, v4);
}

private static int stubIndexOfC1S2(char[] array, int fromIndex, int maxIndex, int v1) {
    return runIndexOfS2(array, fromIndex, maxIndex, v1);
}

private static int stubIndexOfC2S2(char[] array, int fromIndex, int maxIndex, int v1, int v2) {
    return runIndexOfS2(array, fromIndex, maxIndex, v1, v2);
}

private static int stubIndexOfC3S2(char[] array, int fromIndex, int maxIndex, int v1, int v2, int v3) {
    return runIndexOfS2(array, fromIndex, maxIndex, v1, v2, v3);
}

private static int stubIndexOfC4S2(char[] array, int fromIndex, int maxIndex, int v1, int v2, int v3, int v4) {
    return runIndexOfS2(array, fromIndex, maxIndex, v1, v2, v3, v4);
}

// Masked single-value search: reports the first index whose (element | mask) equals needle.

private static int stubIndexOfWithOrMaskS1(byte[] haystack, int fromIndex, int maxIndex, int needle, int mask) {
    for (int idx = fromIndex; idx < maxIndex; idx++) {
        if ((Byte.toUnsignedInt(haystack[idx]) | mask) == needle) {
            return idx;
        }
    }
    return -1;
}

private static int stubIndexOfWithOrMaskS2(byte[] haystack, int fromIndex, int maxIndex, int needle, int mask) {
    for (int idx = fromIndex; idx < maxIndex; idx++) {
        if ((readChar(haystack, idx) | mask) == needle) {
            return idx;
        }
    }
    return -1;
}

private static int stubIndexOfWithOrMaskS2(char[] haystack, int fromIndex, int maxIndex, int needle, int mask) {
    for (int idx = fromIndex; idx < maxIndex; idx++) {
        if ((haystack[idx] | mask) == needle) {
            return idx;
        }
    }
    return -1;
}

// Two-consecutive-value search: reports the index of the first adjacent pair (c1, c2).

private static int stubIndexOf2ConsecutiveS1(byte[] haystack, int fromIndex, int maxIndex, int c1, int c2) {
    for (int idx = fromIndex; idx + 1 < maxIndex; idx++) {
        if (Byte.toUnsignedInt(haystack[idx]) == c1 && Byte.toUnsignedInt(haystack[idx + 1]) == c2) {
            return idx;
        }
    }
    return -1;
}

private static int stubIndexOf2ConsecutiveS2(byte[] haystack, int fromIndex, int maxIndex, int c1, int c2) {
    for (int idx = fromIndex; idx + 1 < maxIndex; idx++) {
        if (readChar(haystack, idx) == c1 && readChar(haystack, idx + 1) == c2) {
            return idx;
        }
    }
    return -1;
}

private static int stubIndexOf2ConsecutiveS2(char[] haystack, int fromIndex, int maxIndex, int c1, int c2) {
    for (int idx = fromIndex; idx + 1 < maxIndex; idx++) {
        if (haystack[idx] == c1 && haystack[idx + 1] == c2) {
            return idx;
        }
    }
    return -1;
}

// Masked two-consecutive-value search.
private static int stubIndexOf2ConsecutiveWithOrMaskS1(byte[] haystack, int fromIndex, int maxIndex, int c1, int c2, int mask1, int mask2) {
    for (int i = fromIndex + 1; i < maxIndex; i++) {
        if ((Byte.toUnsignedInt(haystack[i - 1]) | mask1) == c1 &&
(Byte.toUnsignedInt(haystack[i]) | mask2) == c2) { return i - 1; } } return -1; } private static int stubIndexOf2ConsecutiveWithOrMaskS2(byte[] haystack, int fromIndex, int maxIndex, int c1, int c2, int mask1, int mask2) { for (int i = fromIndex + 1; i < maxIndex; i++) { if ((readChar(haystack, i - 1) | mask1) == c1 && (readChar(haystack, i) | mask2) == c2) { return i - 1; } } return -1; } private static int stubIndexOf2ConsecutiveWithOrMaskS2(char[] haystack, int fromIndex, int maxIndex, int c1, int c2, int mask1, int mask2) { for (int i = fromIndex + 1; i < maxIndex; i++) { if ((haystack[i - 1] | mask1) == c1 && (haystack[i] | mask2) == c2) { return i - 1; } } return -1; } private static boolean stubRegionEqualsS1(byte[] a, long offsetA, byte[] b, long offsetB, int length) { for (int i = 0; i < length; i++) { if (a[(int) offsetA + i] != b[(int) offsetB + i]) { return false; } } return true; } private static boolean stubRegionEqualsS2S1(byte[] a, long offsetA, byte[] b, long offsetB, int length) { for (int i = 0; i < length; i++) { if (readChar(a, (int) offsetA + i) != Byte.toUnsignedInt(b[(int) offsetB + i])) { return false; } } return true; } private static boolean stubRegionEqualsS2(char[] a, long offsetA, char[] b, long offsetB, int length) { for (int i = 0; i < length; i++) { if (a[(int) offsetA + i] != b[(int) offsetB + i]) { return false; } } return true; } private static boolean stubRegionEqualsWithOrMaskS1(byte[] a, long offsetA, byte[] b, long offsetB, byte[] mask, int length) { for (int i = 0; i < length; i++) { if ((a[(int) offsetA + i] | mask[i]) != b[(int) offsetB + i]) { return false; } } return true; } private static boolean stubRegionEqualsWithOrMaskS2(char[] a, long offsetA, char[] b, long offsetB, char[] mask, int length) { for (int i = 0; i < length; i++) { if ((a[(int) offsetA + i] | mask[i]) != b[(int) offsetB + i]) { return false; } } return true; } private static boolean stubRegionEqualsWithOrMaskCompactStrings(byte[] a, long offsetA, 
byte[] b, long offsetB, byte[] mask, int length, boolean compactA, boolean compactB, boolean compactM) { for (int i = 0; i < length; i++) { int vA = compactA ? Byte.toUnsignedInt(a[(int) offsetA + i]) : readChar(a, (int) offsetA + i); int vB = compactB ? Byte.toUnsignedInt(b[(int) offsetB + i]) : readChar(b, (int) offsetB + i); int vM = compactM ? Byte.toUnsignedInt(mask[i]) : readChar(mask, i); if ((vA | vM) != vB) { return false; } } return true; } private static int readChar(byte[] array, int i) { int byte0 = Byte.toUnsignedInt(array[i << 1]); int byte1 = Byte.toUnsignedInt(array[(i << 1) + 1]); return ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN ? byte0 | (byte1 << 8) : (byte0 << 8) | byte1; } private static int runIndexOfS1(byte[] array, int fromIndex, int maxIndex, int... values) { for (int i = fromIndex; i < maxIndex; i++) { for (int v : values) { if (Byte.toUnsignedInt(array[i]) == v) { return i; } } } return -1; } private static int runIndexOfS1(byte[] array, int fromIndex, int maxIndex, byte... values) { for (int i = fromIndex; i < maxIndex; i++) { for (int v : values) { if (array[i] == v) { return i; } } } return -1; } private static int runIndexOfS2(byte[] array, int fromIndex, int maxIndex, int... values) { for (int i = fromIndex; i < maxIndex; i++) { int c = readChar(array, i); for (int v : values) { if (c == v) { return i; } } } return -1; } private static int runIndexOfS2(char[] array, int fromIndex, int maxIndex, int... values) { for (int i = fromIndex; i < maxIndex; i++) { for (int v : values) { if (array[i] == v) { return i; } } } return -1; } private static int runIndexOfS2(char[] array, int fromIndex, int maxIndex, char... 
values) { for (int i = fromIndex; i < maxIndex; i++) { for (int v : values) { if (array[i] == v) { return i; } } } return -1; } private static int runIndexOfS2(String haystack, int fromIndex, int maxIndex, char[] needle) { for (int i = fromIndex; i < maxIndex; i++) { for (char c : needle) { if (haystack.charAt(i) == c) { return i; } } } return -1; } }
apache/poi
34,582
poi/src/main/java/org/apache/poi/util/LocaleID.java
/* ==================================================================== Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==================================================================== */ package org.apache.poi.util; import static java.util.Calendar.SATURDAY; import static java.util.Calendar.SUNDAY; import java.util.Calendar; import java.util.Collections; import java.util.Map; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; /** * Locale Collection * <p> * This enum can be used to map between Windows LCID and Java {@link java.util.Locale Locales} * * @see <a href="https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-lcid/70feba9f-294e-491e-b6eb-56532684c37f">[MS-LCID]: Windows Language Code Identifier (LCID) Reference</a> */ @SuppressWarnings("unused") public enum LocaleID { AR(0x0001, "ar", "ar", "Arabic", 1256, SUNDAY), BG(0x0002, "bg", "bg", "Bulgarian", 1251, -1), CA(0x0003, "ca", "ca", "Catalan", 1252, -1), ZH_HANS(0x0004, "zh_hans", "zh-Hans", "Chinese (Simplified)", 936, -1), CS(0x0005, "cs", "cs", "Czech", 1250, -1), DA(0x0006, "da", "da", "Danish", 1252, -1), DE(0x0007, "de", "de", "German", 1252, -1), EL(0x0008, "el", "el", "Greek", 1253, -1), EN(0x0009, "en", "en", "English", 1252, -1), ES(0x000A, 
"es", "es", "Spanish", 1252, -1), FI(0x000B, "fi", "fi", "Finnish", 1252, -1), FR(0x000C, "fr", "fr", "French", 1252, -1), HE(0x000D, "he", "he", "Hebrew", 1255, SUNDAY), HU(0x000E, "hu", "hu", "Hungarian", 1250, -1), IS(0x000F, "is", "is", "Icelandic", 1252, -1), IT(0x0010, "it", "it", "Italian", 1252, -1), JA(0x0011, "ja", "ja", "Japanese", 932, SUNDAY), KO(0x0012, "ko", "ko", "Korean", 949, SUNDAY), NL(0x0013, "nl", "nl", "Dutch", 1252, -1), NO(0x0014, "no", "no", "Norwegian", 1252, -1), PL(0x0015, "pl", "pl", "Polish", 1250, -1), PT(0x0016, "pt", "pt", "Portuguese", 1252, SUNDAY), RM(0x0017, "rm", "rm", "Romansh", 1252, -1), RO(0x0018, "ro", "ro", "Romanian", 1250, -1), RU(0x0019, "ru", "ru", "Russian", 1251, -1), HR(0x001A, "hr", "hr", "Croatian", 1250, -1), SK(0x001B, "sk", "sk", "Slovak", 1250, -1), SQ(0x001C, "sq", "sq", "Albanian", 1250, -1), SV(0x001D, "sv", "sv", "Swedish", 1252, -1), TH(0x001E, "th", "th", "Thai", 874, -1), TR(0x001F, "tr", "tr", "Turkish", 1254, -1), UR(0x0020, "ur", "ur", "Urdu", 1256, -1), ID(0x0021, "id", "id", "Indonesian", 1252, SUNDAY), UK(0x0022, "uk", "uk", "Ukrainian", 1251, -1), BE(0x0023, "be", "be", "Belarusian", 1251, -1), SL(0x0024, "sl", "sl", "Slovenian", 1250, -1), ET(0x0025, "et", "et", "Estonian", 1257, -1), LV(0x0026, "lv", "lv", "Latvian", 1257, -1), LT(0x0027, "lt", "lt", "Lithuanian", 1257, -1), TG(0x0028, "tg", "tg", "Tajik", 1251, -1), FA(0x0029, "fa", "fa", "Persian", 1256, SATURDAY), VI(0x002A, "vi", "vi", "Vietnamese", 1258, -1), HY(0x002B, "hy", "hy", "Armenian", 0, -1), AZ(0x002C, "az", "az", "Azerbaijani", 1254, -1), EU(0x002D, "eu", "eu", "Basque", 1252, -1), HSB(0x002E, "hsb", "hsb", "Upper Sorbian", 1252, -1), MK(0x002F, "mk", "mk", "Macedonian (FYROM)", 1251, -1), ST(0x0030, "st", "st", "Southern Sotho", 0, -1), TS(0x0031, "ts", "ts", "Tsonga", 0, SUNDAY), TN(0x0032, "tn", "tn", "Setswana", 1252, SUNDAY), VE(0x0033, "ve", "ve", "Venda", 32759, SUNDAY), XH(0x0034, "xh", "xh", "isiXhosa", 1252, SUNDAY), 
ZU(0x0035, "zu", "zu", "isiZulu", 1252, SUNDAY), AF(0x0036, "af", "af", "Afrikaans", 1252, SUNDAY), KA(0x0037, "ka", "ka", "Georgian", 0, -1), FO(0x0038, "fo", "fo", "Faroese", 1252, -1), HI(0x0039, "hi", "hi", "Hindi", 0, -1), MT(0x003A, "mt", "mt", "Maltese", 0, SUNDAY), SE(0x003B, "se", "se", "Sami (Northern)", 1252, -1), GA(0x003C, "ga", "ga", "Irish", 1252, SUNDAY), YI(0x003D, "yi", "yi", "Yiddish", 32759, -1), MS(0x003E, "ms", "ms", "Malay", 1252, -1), KK(0x003F, "kk", "kk", "Kazakh", 0, -1), KY(0x0040, "ky", "ky", "Kyrgyz", 1251, -1), SW(0x0041, "sw", "sw", "Kiswahili", 1252, SUNDAY), TK(0x0042, "tk", "tk", "Turkmen", 1250, -1), UZ(0x0043, "uz", "uz", "Uzbek", 1254, -1), TT(0x0044, "tt", "tt", "Tatar", 1251, -1), BN(0x0045, "bn", "bn", "Bangla", 0, SUNDAY), PA(0x0046, "pa", "pa", "Punjabi", 0, -1), GU(0x0047, "gu", "gu", "Gujarati", 0, -1), OR(0x0048, "or", "or", "Odia", 0, -1), TA(0x0049, "ta", "ta", "Tamil", 0, -1), TE(0x004A, "te", "te", "Telugu", 0, -1), KN(0x004B, "kn", "kn", "Kannada", 0, -1), ML(0x004C, "ml", "ml", "Malayalam", 0, SUNDAY), AS(0x004D, "as", "as", "Assamese", 0, -1), MR(0x004E, "mr", "mr", "Marathi", 0, -1), SA(0x004F, "sa", "sa", "Sanskrit", 0, SUNDAY), MN(0x0050, "mn", "mn", "Mongolian", 1251, -1), BO(0x0051, "bo", "bo", "Tibetan", 0, -1), CY(0x0052, "cy", "cy", "Welsh", 1252, -1), KM(0x0053, "km", "km", "Khmer", 0, SUNDAY), LO(0x0054, "lo", "lo", "Lao", 0, SUNDAY), MY(0x0055, "my", "my", "Burmese", 0, SUNDAY), GL(0x0056, "gl", "gl", "Galician", 1252, -1), KOK(0x0057, "kok", "kok", "Konkani", 0, -1), MNI(0x0058, "mni", "mni", "Manipuri", 32759, -1), SD(0x0059, "sd", "sd", "Sindhi", 1256, -1), SYR(0x005A, "syr", "syr", "Syriac", 0, SUNDAY), SI(0x005B, "si", "si", "Sinhala", 0, -1), CHR(0x005C, "chr", "chr", "Cherokee", 0, SUNDAY), IU(0x005D, "iu", "iu", "Inuktitut", 1252, SUNDAY), AM(0x005E, "am", "am", "Amharic", 0, SUNDAY), TZM(0x005F, "tzm", "tzm", "Tamazight", 1252, -1), KS(0x0060, "ks", "ks", "Kashmiri", 32759, -1), NE(0x0061, 
"ne", "ne", "Nepali", 0, SUNDAY), FY(0x0062, "fy", "fy", "Frisian", 1252, -1), PS(0x0063, "ps", "ps", "Pashto", 0, SATURDAY), FIL(0x0064, "fil", "fil", "Filipino", 1252, SUNDAY), DV(0x0065, "dv", "dv", "Divehi", 0, SUNDAY), BIN(0x0066, "bin", "bin", "Edo", 32759, SUNDAY), FF(0x0067, "ff", "ff", "Fulah", 1252, -1), HA(0x0068, "ha", "ha", "Hausa", 1252, -1), IBB(0x0069, "ibb", "ibb", "Ibibio", 32759, SUNDAY), YO(0x006A, "yo", "yo", "Yoruba", 1252, -1), QUZ(0x006B, "quz", "quz", "Quechua", 1252, SUNDAY), NSO(0x006C, "nso", "nso", "Sesotho sa Leboa", 1252, SUNDAY), BA(0x006D, "ba", "ba", "Bashkir", 1251, -1), LB(0x006E, "lb", "lb", "Luxembourgish", 1252, -1), KL(0x006F, "kl", "kl", "Greenlandic", 1252, -1), IG(0x0070, "ig", "ig", "Igbo", 1252, -1), KR(0x0071, "kr", "kr", "Kanuri", 32759, SUNDAY), OM(0x0072, "om", "om", "Oromo", 0, SUNDAY), TI(0x0073, "ti", "ti", "Tigrinya", 0, -1), GN(0x0074, "gn", "gn", "Guarani", 1252, SUNDAY), HAW(0x0075, "haw", "haw", "Hawaiian", 1252, SUNDAY), LA(0x0076, "la", "la", "Latin", 32759, SUNDAY), SO(0x0077, "so", "so", "Somali", 0, -1), II(0x0078, "ii", "ii", "Yi", 0, -1), PAP(0x0079, "pap", "pap", "Papiamento", 32759, -1), ARN(0x007A, "arn", "arn", "Mapudungun", 1252, SUNDAY), INVALID_A(0x007B, "invalid_a", "", "", 32759, -1), MOH(0x007C, "moh", "moh", "Mohawk", 1252, SUNDAY), INVALID_B(0x007D, "invalid_b", "", "", 32759, -1), BR(0x007E, "br", "br", "Breton", 1252, -1), INVALID_C(0x007F, "invalid_c", "", "", 1252, -1), UG(0x0080, "ug", "ug", "Uyghur", 1256, -1), MI(0x0081, "mi", "mi", "Maori", 0, -1), OC(0x0082, "oc", "oc", "Occitan", 1252, -1), CO(0x0083, "co", "co", "Corsican", 1252, -1), GSW(0x0084, "gsw", "gsw", "Alsatian", 1252, -1), SAH(0x0085, "sah", "sah", "Sakha", 1251, -1), QUT(0x0086, "qut", "qut", "Guatemala", 1252, -1), RW(0x0087, "rw", "rw", "Kinyarwanda", 1252, -1), WO(0x0088, "wo", "wo", "Wolof", 1252, -1), INVALID_D(0x0089, "invalid_d", "", "", 32759, -1), INVALID_E(0x008A, "invalid_e", "", "", 32759, -1), 
INVALID_F(0x008B, "invalid_f", "", "", 32759, -1), PRS(0x008C, "prs", "prs", "Dari", 1256, SATURDAY), INVALID_G(0x008D, "invalid_g", "", "", 32759, -1), INVALID_H(0x008E, "invalid_h", "", "", 32759, -1), INVALID_I(0x008F, "invalid_i", "", "", 32759, -1), INVALID_J(0x0090, "invalid_j", "", "", 32759, -1), GD(0x0091, "gd", "gd", "Scottish Gaelic", 1252, -1), KU(0x0092, "ku", "ku", "Central Kurdish", 1256, SUNDAY), QUC(0x0093, "quc", "quc", "K'iche'", 32759, -1), AR_SA(0x0401, "ar_sa", "ar-SA", "Arabic (Saudi Arabia)", 1256, SUNDAY), BG_BG(0x0402, "bg_bg", "bg-BG", "Bulgarian (Bulgaria)", 1251, -1), CA_ES(0x0403, "ca_es", "ca-ES", "Catalan (Catalan)", 1252, -1), ZH_TW(0x0404, "zh_tw", "zh-TW", "Chinese (Traditional, Taiwan)", 950, SUNDAY), CS_CZ(0x0405, "cs_cz", "cs-CZ", "Czech (Czech Republic)", 1250, -1), DA_DK(0x0406, "da_dk", "da-DK", "Danish (Denmark)", 1252, -1), DE_DE(0x0407, "de_de", "de-DE", "German (Germany)", 1252, -1), EL_GR(0x0408, "el_gr", "el-GR", "Greek (Greece)", 1253, -1), EN_US(0x0409, "en_us", "en-US", "English (United States)", 1252, SUNDAY), ES_ES_TRADNL(0x040A, "es_es_tradnl", "es-ES-tradnl", "Spanish (Spain,tradnl)", 1252, -1), FI_FI(0x040B, "fi_fi", "fi-FI", "Finnish (Finland)", 1252, -1), FR_FR(0x040C, "fr_fr", "fr-FR", "French (France)", 1252, -1), HE_IL(0x040D, "he_il", "he-IL", "Hebrew (Israel)", 1255, SUNDAY), HU_HU(0x040E, "hu_hu", "hu-HU", "Hungarian (Hungary)", 1250, -1), IS_IS(0x040F, "is_is", "is-IS", "Icelandic (Iceland)", 1252, -1), IT_IT(0x0410, "it_it", "it-IT", "Italian (Italy)", 1252, -1), JA_JP(0x0411, "ja_jp", "ja-JP", "Japanese (Japan)", 932, SUNDAY), KO_KR(0x0412, "ko_kr", "ko-KR", "Korean (Korea)", 949, SUNDAY), NL_NL(0x0413, "nl_nl", "nl-NL", "Dutch (Netherlands)", 1252, -1), NB_NO(0x0414, "nb_no", "nb-NO", "Norwegian, Bokm\u00E5l (Norway)", 1252, -1), PL_PL(0x0415, "pl_pl", "pl-PL", "Polish (Poland)", 1250, -1), PT_BR(0x0416, "pt_br", "pt-BR", "Portuguese (Brazil)", 1252, SUNDAY), RM_CH(0x0417, "rm_ch", "rm-CH", "Romansh 
(Switzerland)", 1252, -1), RO_RO(0x0418, "ro_ro", "ro-RO", "Romanian (Romania)", 1250, -1), RU_RU(0x0419, "ru_ru", "ru-RU", "Russian (Russia)", 1251, -1), HR_HR(0x041A, "hr_hr", "hr-HR", "Croatian (Croatia)", 1250, -1), SK_SK(0x041B, "sk_sk", "sk-SK", "Slovak (Slovakia)", 1250, -1), SQ_AL(0x041C, "sq_al", "sq-AL", "Albanian (Albania)", 1250, -1), SV_SE(0x041D, "sv_se", "sv-SE", "Swedish (Sweden)", 1252, -1), TH_TH(0x041E, "th_th", "th-TH", "Thai (Thailand)", 874, -1), TR_TR(0x041F, "tr_tr", "tr-TR", "Turkish (Turkey)", 1254, -1), UR_PK(0x0420, "ur_pk", "ur-PK", "Urdu (Islamic Republic of Pakistan)", 1256, -1), ID_ID(0x0421, "id_id", "id-ID", "Indonesian (Indonesia)", 1252, SUNDAY), UK_UA(0x0422, "uk_ua", "uk-UA", "Ukrainian (Ukraine)", 1251, -1), BE_BY(0x0423, "be_by", "be-BY", "Belarusian (Belarus)", 1251, -1), SL_SI(0x0424, "sl_si", "sl-SI", "Slovenian (Slovenia)", 1250, -1), ET_EE(0x0425, "et_ee", "et-EE", "Estonian (Estonia)", 1257, -1), LV_LV(0x0426, "lv_lv", "lv-LV", "Latvian (Latvia)", 1257, -1), LT_LT(0x0427, "lt_lt", "lt-LT", "Lithuanian (Lithuania)", 1257, -1), TG_CYRL_TJ(0x0428, "tg_cyrl_tj", "tg-Cyrl-TJ", "Tajik (Cyrillic, Tajikistan)", 1251, -1), FA_IR(0x0429, "fa_ir", "fa-IR", "Persian (Iran)", 1256, SATURDAY), VI_VN(0x042A, "vi_vn", "vi-VN", "Vietnamese (Vietnam)", 1258, -1), HY_AM(0x042B, "hy_am", "hy-AM", "Armenian (Armenia)", 0, -1), AZ_LATN_AZ(0x042C, "az_latn_az", "az-Latn-AZ", "Azerbaijani (Latin, Azerbaijan)", 1254, -1), EU_ES(0x042D, "eu_es", "eu-ES", "Basque (Basque)", 1252, -1), HSB_DE(0x042E, "hsb_de", "hsb-DE", "Upper Sorbian (Germany)", 1252, -1), MK_MK(0x042F, "mk_mk", "mk-MK", "Macedonian (Former Yugoslav Republic of Macedonia)", 1251, -1), ST_ZA(0x0430, "st_za", "st-ZA", "Southern Sotho (South Africa)", 0, -1), TS_ZA(0x0431, "ts_za", "ts-ZA", "Tsonga (South Africa)", 0, -1), TN_ZA(0x0432, "tn_za", "tn-ZA", "Setswana (South Africa)", 1252, SUNDAY), VE_ZA(0x0433, "ve_za", "ve-ZA", "Venda (South Africa)", 32759, SUNDAY), XH_ZA(0x0434, 
"xh_za", "xh-ZA", "isiXhosa (South Africa)", 1252, SUNDAY), ZU_ZA(0x0435, "zu_za", "zu-ZA", "isiZulu (South Africa)", 1252, SUNDAY), AF_ZA(0x0436, "af_za", "af-ZA", "Afrikaans (South Africa)", 1252, SUNDAY), KA_GE(0x0437, "ka_ge", "ka-GE", "Georgian (Georgia)", 0, -1), FO_FO(0x0438, "fo_fo", "fo-FO", "Faroese (Faroe Islands)", 1252, -1), HI_IN(0x0439, "hi_in", "hi-IN", "Hindi (India)", 0, -1), MT_MT(0x043A, "mt_mt", "mt-MT", "Maltese (Malta)", 0, SUNDAY), SE_NO(0x043B, "se_no", "se-NO", "Sami, Northern (Norway)", 1252, -1), YI_HEBR(0x043D, "yi_hebr", "yi-Hebr", "Yiddish (Hebrew)", 32759, -1), MS_MY(0x043E, "ms_my", "ms-MY", "Malay (Malaysia)", 1252, -1), KK_KZ(0x043F, "kk_kz", "kk-KZ", "Kazakh (Kazakhstan)", 0, -1), KY_KG(0x0440, "ky_kg", "ky-KG", "Kyrgyz (Kyrgyzstan)", 1251, -1), SW_KE(0x0441, "sw_ke", "sw-KE", "Kiswahili (Kenya)", 1252, SUNDAY), TK_TM(0x0442, "tk_tm", "tk-TM", "Turkmen (Turkmenistan)", 1250, -1), UZ_LATN_UZ(0x0443, "uz_latn_uz", "uz-Latn-UZ", "Uzbek (Latin, Uzbekistan)", 1254, -1), TT_RU(0x0444, "tt_ru", "tt-RU", "Tatar (Russia)", 1251, -1), BN_IN(0x0445, "bn_in", "bn-IN", "Bangla (India)", 0, -1), PA_IN(0x0446, "pa_in", "pa-IN", "Punjabi (India)", 0, -1), GU_IN(0x0447, "gu_in", "gu-IN", "Gujarati (India)", 0, -1), OR_IN(0x0448, "or_in", "or-IN", "Odia (India)", 0, -1), TA_IN(0x0449, "ta_in", "ta-IN", "Tamil (India)", 0, -1), TE_IN(0x044A, "te_in", "te-IN", "Telugu (India)", 0, -1), KN_IN(0x044B, "kn_in", "kn-IN", "Kannada (India)", 0, -1), ML_IN(0x044C, "ml_in", "ml-IN", "Malayalam (India)", 0, SUNDAY), AS_IN(0x044D, "as_in", "as-IN", "Assamese (India)", 0, -1), MR_IN(0x044E, "mr_in", "mr-IN", "Marathi (India)", 0, -1), SA_IN(0x044F, "sa_in", "sa-IN", "Sanskrit (India)", 0, SUNDAY), MN_MN(0x0450, "mn_mn", "mn-MN", "Mongolian (Cyrillic, Mongolia)", 1251, -1), BO_CN(0x0451, "bo_cn", "bo-CN", "Tibetan (PRC)", 0, -1), CY_GB(0x0452, "cy_gb", "cy-GB", "Welsh (United Kingdom)", 1252, -1), KM_KH(0x0453, "km_kh", "km-KH", "Khmer (Cambodia)", 0, SUNDAY), 
LO_LA(0x0454, "lo_la", "lo-LA", "Lao (Lao P.D.R.)", 0, SUNDAY), MY_MM(0x0455, "my_mm", "my-MM", "Burmese (Myanmar)", 0, SUNDAY), GL_ES(0x0456, "gl_es", "gl-ES", "Galician (Galician)", 1252, -1), KOK_IN(0x0457, "kok_in", "kok-IN", "Konkani (India)", 0, -1), MNI_IN(0x0458, "mni_in", "mni-IN", "Manipuri (India)", 32759, -1), SD_DEVA_IN(0x0459, "sd_deva_in", "sd-Deva-IN", "Sindhi (Devanagari, India)", 32759, SUNDAY), SYR_SY(0x045A, "syr_sy", "syr-SY", "Syriac (Syria)", 0, SUNDAY), SI_LK(0x045B, "si_lk", "si-LK", "Sinhala (Sri Lanka)", 0, -1), CHR_CHER_US(0x045C, "chr_cher_us", "chr-Cher-US", "Cherokee (Cherokee)", 0, SUNDAY), IU_CANS_CA(0x045D, "iu_cans_ca", "iu-Cans-CA", "Inuktitut (Syllabics, Canada)", 0, SUNDAY), AM_ET(0x045E, "am_et", "am-ET", "Amharic (Ethiopia)", 0, SUNDAY), TZM_ARAB_MA(0x045F, "tzm_arab_ma", "tzm-Arab-MA", "Central Atlas Tamazight (Arabic, Morocco)", 32759, SATURDAY), KS_ARAB(0x0460, "ks_arab", "ks-Arab", "Kashmiri (Perso-Arabic)", 32759, SUNDAY), NE_NP(0x0461, "ne_np", "ne-NP", "Nepali (Nepal)", 0, SUNDAY), FY_NL(0x0462, "fy_nl", "fy-NL", "Frisian (Netherlands)", 1252, -1), PS_AF(0x0463, "ps_af", "ps-AF", "Pashto (Afghanistan)", 0, SATURDAY), FIL_PH(0x0464, "fil_ph", "fil-PH", "Filipino (Philippines)", 1252, SUNDAY), DV_MV(0x0465, "dv_mv", "dv-MV", "Divehi (Maldives)", 0, SUNDAY), BIN_NG(0x0466, "bin_ng", "bin-NG", "Edo (Nigeria)", 32759, SUNDAY), FUV_NG(0x0467, "fuv_ng", "fuv-NG", "fuv (Nigeria)", 32759, -1), HA_LATN_NG(0x0468, "ha_latn_ng", "ha-Latn-NG", "Hausa (Latin, Nigeria)", 1252, -1), IBB_NG(0x0469, "ibb_ng", "ibb-NG", "Ibibio (Nigeria)", 32759, SUNDAY), YO_NG(0x046A, "yo_ng", "yo-NG", "Yoruba (Nigeria)", 1252, -1), QUZ_BO(0x046B, "quz_bo", "quz-BO", "Quechua (Bolivia)", 1252, SUNDAY), NSO_ZA(0x046C, "nso_za", "nso-ZA", "Sesotho sa Leboa (South Africa)", 1252, SUNDAY), BA_RU(0x046D, "ba_ru", "ba-RU", "Bashkir (Russia)", 1251, -1), LB_LU(0x046E, "lb_lu", "lb-LU", "Luxembourgish (Luxembourg)", 1252, -1), KL_GL(0x046F, "kl_gl", "kl-GL", 
"Greenlandic (Greenland)", 1252, -1), IG_NG(0x0470, "ig_ng", "ig-NG", "Igbo (Nigeria)", 1252, -1), KR_NG(0x0471, "kr_ng", "kr-NG", "Kanuri (Nigeria)", 32759, SUNDAY), OM_ET(0x0472, "om_et", "om-ET", "Oromo (Ethiopia)", 0, SUNDAY), TI_ET(0x0473, "ti_et", "ti-ET", "Tigrinya (Ethiopia)", 0, SUNDAY), GN_PY(0x0474, "gn_py", "gn-PY", "Guarani (Paraguay)", 1252, SUNDAY), HAW_US(0x0475, "haw_us", "haw-US", "Hawaiian (United States)", 1252, SUNDAY), LA_LATN(0x0476, "la_latn", "la-Latn", "Latin (Latin)", 32759, -1), SO_SO(0x0477, "so_so", "so-SO", "Somali (Somalia)", 0, -1), II_CN(0x0478, "ii_cn", "ii-CN", "Yi (PRC)", 0, -1), PAP_029(0x0479, "pap_029", "pap-029", "Papiamento (Caribbean)", 32759, -1), ARN_CL(0x047A, "arn_cl", "arn-CL", "Mapudungun (Chile)", 1252, SUNDAY), MOH_CA(0x047C, "moh_ca", "moh-CA", "Mohawk (Mohawk)", 1252, SUNDAY), BR_FR(0x047E, "br_fr", "br-FR", "Breton (France)", 1252, -1), UG_CN(0x0480, "ug_cn", "ug-CN", "Uyghur (PRC)", 1256, -1), MI_NZ(0x0481, "mi_nz", "mi-NZ", "Maori (New Zealand)", 0, -1), OC_FR(0x0482, "oc_fr", "oc-FR", "Occitan (France)", 1252, -1), CO_FR(0x0483, "co_fr", "co-FR", "Corsican (France)", 1252, -1), GSW_FR(0x0484, "gsw_fr", "gsw-FR", "Alsatian (France)", 1252, -1), SAH_RU(0x0485, "sah_ru", "sah-RU", "Sakha (Russia)", 1251, -1), QUT_GT(0x0486, "qut_gt", "qut-GT", "qut (Guatemala)", 1252, -1), RW_RW(0x0487, "rw_rw", "rw-RW", "Kinyarwanda (Rwanda)", 1252, -1), WO_SN(0x0488, "wo_sn", "wo-SN", "Wolof (Senegal)", 1252, -1), PRS_AF(0x048C, "prs_af", "prs-AF", "Dari (Afghanistan)", 1256, SATURDAY), PLT_MG(0x048D, "plt_mg", "plt-MG", "plt (Madagascar)", 32759, -1), ZH_YUE_HK(0x048E, "zh_yue_hk", "yue-HK", "yue (Hong Kong)", 32759, -1), TDD_TALE_CN(0x048F, "tdd_tale_cn", "tdd-Tale-CN", "tdd (Tai Le,China)", 32759, -1), KHB_TALU_CN(0x0490, "khb_talu_cn", "khb-Talu-CN", "khb (New Tai Lue,China)", 32759, -1), GD_GB(0x0491, "gd_gb", "gd-GB", "Scottish Gaelic (United Kingdom)", 1252, -1), KU_ARAB_IQ(0x0492, "ku_arab_iq", "ku-Arab-IQ", "Central 
Kurdish (Iraq)", 1256, SUNDAY), QUC_CO(0x0493, "quc_co", "quc-CO", "quc (Colombia)", 32759, -1), QPS_PLOC(0x0501, "qps_ploc", "qps-Ploc", "qps (Ploc)", 1250, -1), QPS_PLOCA(0x05FE, "qps_ploca", "qps-ploca", "qps (ploca)", 932, -1), AR_IQ(0x0801, "ar_iq", "ar-IQ", "Arabic (Iraq)", 1256, SATURDAY), CA_ES_VALENCIA(0x0803, "ca_es_valencia", "ca-ES-valencia", "Valencian (Spain)", 1252, -1), ZH_CN(0x0804, "zh_cn", "zh-CN", "Chinese (Simplified, PRC)", 936, -1), DE_CH(0x0807, "de_ch", "de-CH", "German (Switzerland)", 1252, -1), EN_GB(0x0809, "en_gb", "en-GB", "English (United Kingdom)", 1252, -1), ES_MX(0x080A, "es_mx", "es-MX", "Spanish (Mexico)", 1252, SUNDAY), FR_BE(0x080C, "fr_be", "fr-BE", "French (Belgium)", 1252, -1), IT_CH(0x0810, "it_ch", "it-CH", "Italian (Switzerland)", 1252, -1), JA_PLOC_JP(0x0811, "ja_ploc_jp", "ja-Ploc-JP", "Japanese (Ploc,Japan)", 32759, -1), NL_BE(0x0813, "nl_be", "nl-BE", "Dutch (Belgium)", 1252, -1), NN_NO(0x0814, "nn_no", "nn-NO", "Norwegian, Nynorsk (Norway)", 1252, -1), PT_PT(0x0816, "pt_pt", "pt-PT", "Portuguese (Portugal)", 1252, SUNDAY), RO_MD(0x0818, "ro_md", "ro-MD", "Romanian (Moldova)", 0, -1), RU_MD(0x0819, "ru_md", "ru-MD", "Russian (Moldova)", 32759, -1), SR_LATN_CS(0x081A, "sr_latn_cs", "sr-Latn-CS", "Serbian (Latin,Serbia and Montenegro)", 1250, -1), SV_FI(0x081D, "sv_fi", "sv-FI", "Swedish (Finland)", 1252, -1), UR_IN(0x0820, "ur_in", "ur-IN", "Urdu (India)", 0, -1), INVALID_K(0x0827, "invalid_k", "", "", 32759, -1), AZ_CYRL_AZ(0x082C, "az_cyrl_az", "az-Cyrl-AZ", "Azerbaijani (Cyrillic, Azerbaijan)", 1251, -1), DSB_DE(0x082E, "dsb_de", "dsb-DE", "Lower Sorbian (Germany)", 1252, -1), TN_BW(0x0832, "tn_bw", "tn-BW", "Setswana (Botswana)", 1252, SUNDAY), SE_SE(0x083B, "se_se", "se-SE", "Sami, Northern (Sweden)", 1252, -1), GA_IE(0x083C, "ga_ie", "ga-IE", "Irish (Ireland)", 1252, SUNDAY), MS_BN(0x083E, "ms_bn", "ms-BN", "Malay (Brunei Darussalam)", 1252, -1), UZ_CYRL_UZ(0x0843, "uz_cyrl_uz", "uz-Cyrl-UZ", "Uzbek (Cyrillic, 
Uzbekistan)", 1251, -1), BN_BD(0x0845, "bn_bd", "bn-BD", "Bangla (Bangladesh)", 0, SUNDAY), PA_ARAB_PK(0x0846, "pa_arab_pk", "pa-Arab-PK", "Punjabi (Islamic Republic of Pakistan)", 1256, -1), TA_LK(0x0849, "ta_lk", "ta-LK", "Tamil (Sri Lanka)", 0, -1), MN_MONG_CN(0x0850, "mn_mong_cn", "mn-Mong-CN", "Mongolian (Traditional Mongolian, PRC)", 0, -1), BO_BT(0x0851, "bo_bt", "bo-BT", "Tibetan (Bhutan)", 32759, -1), SD_ARAB_PK(0x0859, "sd_arab_pk", "sd-Arab-PK", "Sindhi (Islamic Republic of Pakistan)", 1256, -1), IU_LATN_CA(0x085D, "iu_latn_ca", "iu-Latn-CA", "Inuktitut (Latin, Canada)", 1252, SUNDAY), TZM_LATN_DZ(0x085F, "tzm_latn_dz", "tzm-Latn-DZ", "Tamazight (Latin, Algeria)", 1252, -1), KS_DEVA(0x0860, "ks_deva", "ks-Deva", "Kashmiri (Devanagari)", 32759, -1), NE_IN(0x0861, "ne_in", "ne-IN", "Nepali (India)", 0, SUNDAY), FF_LATN_SN(0x0867, "ff_latn_sn", "ff-Latn-SN", "Fulah (Latin, Senegal)", 1252, -1), QUZ_EC(0x086B, "quz_ec", "quz-EC", "Quechua (Ecuador)", 1252, SUNDAY), TI_ER(0x0873, "ti_er", "ti-ER", "Tigrinya (Eritrea)", 0, -1), QPS_PLOCM(0x09FF, "qps_plocm", "qps-plocm", "qps (plocm)", 1256, -1), AR_EG(0x0C01, "ar_eg", "ar-EG", "Arabic (Egypt)", 1256, SATURDAY), ZH_HK(0x0C04, "zh_hk", "zh-HK", "Chinese (Traditional, Hong Kong S.A.R.)", 950, SUNDAY), DE_AT(0x0C07, "de_at", "de-AT", "German (Austria)", 1252, -1), EN_AU(0x0C09, "en_au", "en-AU", "English (Australia)", 1252, -1), ES_ES(0x0C0A, "es_es", "es-ES", "Spanish (Spain)", 1252, -1), FR_CA(0x0C0C, "fr_ca", "fr-CA", "French (Canada)", 1252, SUNDAY), SR_CYRL_CS(0x0C1A, "sr_cyrl_cs", "sr-Cyrl-CS", "Serbian (Cyrillic,Serbia and Montenegro)", 1251, -1), SE_FI(0x0C3B, "se_fi", "se-FI", "Sami, Northern (Finland)", 1252, -1), MN_MONG_MN(0x0C50, "mn_mong_mn", "mn-Mong-MN", "Mongolian (Traditional Mongolian, Mongolia)", 0, -1), DZ_BT(0x0C51, "dz_bt", "dz-BT", "Dzongkha (Bhutan)", 0, SUNDAY), TMZ_MA(0x0C5F, "tmz_ma", "tmz-MA", "tmz (Morocco)", 32759, -1), QUZ_PE(0x0C6b, "quz_pe", "quz-PE", "Quechua (Peru)", 1252, -1), 
AR_LY(0x1001, "ar_ly", "ar-LY", "Arabic (Libya)", 1256, SATURDAY), ZH_SG(0x1004, "zh_sg", "zh-SG", "Chinese (Simplified, Singapore)", 936, SUNDAY), DE_LU(0x1007, "de_lu", "de-LU", "German (Luxembourg)", 1252, -1), EN_CA(0x1009, "en_ca", "en-CA", "English (Canada)", 1252, SUNDAY), ES_GT(0x100A, "es_gt", "es-GT", "Spanish (Guatemala)", 1252, SUNDAY), FR_CH(0x100C, "fr_ch", "fr-CH", "French (Switzerland)", 1252, -1), HR_BA(0x101A, "hr_ba", "hr-BA", "Croatian (Latin, Bosnia and Herzegovina)", 1250, -1), SMJ_NO(0x103B, "smj_no", "smj-NO", "Sami, Lule (Norway)", 1252, -1), TZM_TFNG_MA(0x105F, "tzm_tfng_ma", "tzm-Tfng-MA", "Central Atlas Tamazight (Tifinagh, Morocco)", 0, SATURDAY), AR_DZ(0x1401, "ar_dz", "ar-DZ", "Arabic (Algeria)", 1256, SATURDAY), ZH_MO(0x1404, "zh_mo", "zh-MO", "Chinese (Traditional, Macao S.A.R.)", 950, SUNDAY), DE_LI(0x1407, "de_li", "de-LI", "German (Liechtenstein)", 1252, -1), EN_NZ(0x1409, "en_nz", "en-NZ", "English (New Zealand)", 1252, SUNDAY), ES_CR(0x140A, "es_cr", "es-CR", "Spanish (Costa Rica)", 1252, -1), FR_LU(0x140C, "fr_lu", "fr-LU", "French (Luxembourg)", 1252, -1), BS_LATN_BA(0x141A, "bs_latn_ba", "bs-Latn-BA", "Bosnian (Latin, Bosnia and Herzegovina)", 1250, -1), SMJ_SE(0x143B, "smj_se", "smj-SE", "Sami, Lule (Sweden)", 1252, -1), AR_MA(0x1801, "ar_ma", "ar-MA", "Arabic (Morocco)", 1256, -1), EN_IE(0x1809, "en_ie", "en-IE", "English (Ireland)", 1252, SUNDAY), ES_PA(0x180A, "es_pa", "es-PA", "Spanish (Panama)", 1252, SUNDAY), FR_MC(0x180C, "fr_mc", "fr-MC", "French (Monaco)", 1252, -1), SR_LATN_BA(0x181A, "sr_latn_ba", "sr-Latn-BA", "Serbian (Latin, Bosnia and Herzegovina)", 1250, -1), SMA_NO(0x183B, "sma_no", "sma-NO", "Sami, Southern (Norway)", 1252, -1), AR_TN(0x1C01, "ar_tn", "ar-TN", "Arabic (Tunisia)", 1256, -1), EN_ZA(0x1C09, "en_za", "en-ZA", "English (South Africa)", 1252, SUNDAY), ES_DO(0x1C0A, "es_do", "es-DO", "Spanish (Dominican Republic)", 1252, SUNDAY), INVALID_L(0x1C0C, "invalid_l", "", "", 32759, -1), 
SR_CYRL_BA(0x1C1A, "sr_cyrl_ba", "sr-Cyrl-BA", "Serbian (Cyrillic, Bosnia and Herzegovina)", 1251, -1), SMA_SE(0x1C3B, "sma_se", "sma-SE", "Sami, Southern (Sweden)", 1252, -1), AR_OM(0x2001, "ar_om", "ar-OM", "Arabic (Oman)", 1256, SUNDAY), INVALID_M(0x2008, "invalid_m", "", "", 32759, -1), EN_JM(0x2009, "en_jm", "en-JM", "English (Jamaica)", 1252, SUNDAY), ES_VE(0x200A, "es_ve", "es-VE", "Spanish (Venezuela)", 1252, -1), FR_RE(0x200C, "fr_re", "fr-RE", "French (Reunion)", 0, -1), BS_CYRL_BA(0x201A, "bs_cyrl_ba", "bs-Cyrl-BA", "Bosnian (Cyrillic, Bosnia and Herzegovina)", 1251, -1), SMS_FI(0x203B, "sms_fi", "sms-FI", "Sami, Skolt (Finland)", 1252, -1), AR_YE(0x2401, "ar_ye", "ar-YE", "Arabic (Yemen)", 1256, SATURDAY), EN_029(0x2409, "en_029", "en-029", "English (Caribbean)", 1252, -1), ES_CO(0x240A, "es_co", "es-CO", "Spanish (Colombia)", 1252, SUNDAY), FR_CD(0x240C, "fr_cd", "fr-CD", "French (Congo DRC)", 0, -1), SR_LATN_RS(0x241A, "sr_latn_rs", "sr-Latn-RS", "Serbian (Latin, Serbia)", 1250, -1), SMN_FI(0x243B, "smn_fi", "smn-FI", "Sami, Inari (Finland)", 1252, -1), AR_SY(0x2801, "ar_sy", "ar-SY", "Arabic (Syria)", 1256, SATURDAY), EN_BZ(0x2809, "en_bz", "en-BZ", "English (Belize)", 1252, SUNDAY), ES_PE(0x280A, "es_pe", "es-PE", "Spanish (Peru)", 1252, SUNDAY), FR_SN(0x280C, "fr_sn", "fr-SN", "French (Senegal)", 0, -1), SR_CYRL_RS(0x281A, "sr_cyrl_rs", "sr-Cyrl-RS", "Serbian (Cyrillic, Serbia)", 1251, -1), AR_JO(0x2C01, "ar_jo", "ar-JO", "Arabic (Jordan)", 1256, SATURDAY), EN_TT(0x2C09, "en_tt", "en-TT", "English (Trinidad and Tobago)", 1252, SUNDAY), ES_AR(0x2C0A, "es_ar", "es-AR", "Spanish (Argentina)", 1252, SUNDAY), FR_CM(0x2C0C, "fr_cm", "fr-CM", "French (Cameroon)", 0, -1), SR_LATN_ME(0x2C1A, "sr_latn_me", "sr-Latn-ME", "Serbian (Latin, Montenegro)", 1250, -1), AR_LB(0x3001, "ar_lb", "ar-LB", "Arabic (Lebanon)", 1256, -1), EN_ZW(0x3009, "en_zw", "en-ZW", "English (Zimbabwe)", 1252, SUNDAY), ES_EC(0x300A, "es_ec", "es-EC", "Spanish (Ecuador)", 1252, -1), 
FR_CI(0x300C, "fr_ci", "fr-CI", "French (C\u00F4te d\u2019Ivoire)", 0, -1), SR_CYRL_ME(0x301A, "sr_cyrl_me", "sr-Cyrl-ME", "Serbian (Cyrillic, Montenegro)", 1251, -1), AR_KW(0x3401, "ar_kw", "ar-KW", "Arabic (Kuwait)", 1256, SATURDAY), EN_PH(0x3409, "en_ph", "en-PH", "English (Philippines)", 1252, SUNDAY), ES_CL(0x340A, "es_cl", "es-CL", "Spanish (Chile)", 1252, -1), FR_ML(0x340C, "fr_ml", "fr-ML", "French (Mali)", 0, -1), AR_AE(0x3801, "ar_ae", "ar-AE", "Arabic (U.A.E.)", 1256, SATURDAY), EN_ID(0x3809, "en_id", "en-ID", "English (Indonesia)", 32759, SUNDAY), ES_UY(0x380A, "es_uy", "es-UY", "Spanish (Uruguay)", 1252, -1), FR_MA(0x380C, "fr_ma", "fr-MA", "French (Morocco)", 0, SATURDAY), AR_BH(0x3c01, "ar_bh", "ar-BH", "Arabic (Bahrain)", 1256, SATURDAY), EN_HK(0x3c09, "en_hk", "en-HK", "English (Hong Kong SAR)", 0, SUNDAY), ES_PY(0x3c0A, "es_py", "es-PY", "Spanish (Paraguay)", 1252, SUNDAY), FR_HT(0x3c0C, "fr_ht", "fr-HT", "French (Haiti)", 0, -1), AR_QA(0x4001, "ar_qa", "ar-QA", "Arabic (Qatar)", 1256, SATURDAY), EN_IN(0x4009, "en_in", "en-IN", "English (India)", 1252, -1), ES_BO(0x400A, "es_bo", "es-BO", "Spanish (Bolivia)", 1252, -1), AR_PLOC_SA(0x4401, "ar_ploc_sa", "ar-Ploc-SA", "Arabic (Ploc,Saudi Arabia)", 32759, -1), EN_MY(0x4409, "en_my", "en-MY", "English (Malaysia)", 1252, SUNDAY), ES_SV(0x440A, "es_sv", "es-SV", "Spanish (El Salvador)", 1252, SUNDAY), AR_145(0x4801, "ar_145", "ar-145", "Arabic (Western Asia)", 32759, -1), EN_SG(0x4809, "en_sg", "en-SG", "English (Singapore)", 1252, SUNDAY), ES_HN(0x480A, "es_hn", "es-HN", "Spanish (Honduras)", 1252, SUNDAY), EN_AE(0x4C09, "en_ae", "en-AE", "English (United Arab Emirates)", 32759, -1), ES_NI(0x4C0A, "es_ni", "es-NI", "Spanish (Nicaragua)", 1252, SUNDAY), EN_BH(0x5009, "en_bh", "en-BH", "English (Bahrain)", 32759, -1), ES_PR(0x500A, "es_pr", "es-PR", "Spanish (Puerto Rico)", 1252, SUNDAY), EN_EG(0x5409, "en_eg", "en-EG", "English (Egypt)", 32759, -1), ES_US(0x540A, "es_us", "es-US", "Spanish (United 
States)", 1252, SUNDAY), EN_JO(0x5809, "en_jo", "en-JO", "English (Jordan)", 32759, -1), ES_419(0x580A, "es_419", "es-419", "Spanish (Latin America)", 0, -1), EN_KW(0x5C09, "en_kw", "en-KW", "English (Kuwait)", 32759, -1), ES_CU(0x5C0A, "es_cu", "es-CU", "Spanish (Cuba)", 0, -1), EN_TR(0x6009, "en_tr", "en-TR", "English (Turkey)", 32759, -1), EN_YE(0x6409, "en_ye", "en-YE", "English (Yemen)", 32759, -1), BS_CYRL(0x641A, "bs_cyrl", "bs-Cyrl", "Bosnian (Cyrillic)", 1251, -1), BS_LATN(0x681A, "bs_latn", "bs-Latn", "Bosnian (Latin)", 1250, -1), SR_CYRL(0x6C1A, "sr_cyrl", "sr-Cyrl", "Serbian (Cyrillic)", 1251, -1), SR_LATN(0x701A, "sr_latn", "sr-Latn", "Serbian (Latin)", 1250, -1), SMN(0x703B, "smn", "smn", "Sami (Inari)", 1252, -1), AZ_CYRL(0x742C, "az_cyrl", "az-Cyrl", "Azerbaijani (Cyrillic)", 1251, -1), SMS(0x743B, "sms", "sms", "Sami (Skolt)", 1252, -1), ZH(0x7804, "zh", "zh", "Chinese", 936, -1), NN(0x7814, "nn", "nn", "Norwegian (Nynorsk)", 1252, -1), BS(0x781A, "bs", "bs", "Bosnian", 1250, -1), AZ_LATN(0x782C, "az_latn", "az-Latn", "Azerbaijani (Latin)", 1254, -1), SMA(0x783B, "sma", "sma", "Sami (Southern)", 1252, -1), UZ_CYRL(0x7843, "uz_cyrl", "uz-Cyrl", "Uzbek (Cyrillic)", 1251, -1), MN_CYRL(0x7850, "mn_cyrl", "mn-Cyrl", "Mongolian (Cyrillic)", 1251, -1), IU_CANS(0x785D, "iu_cans", "iu-Cans", "Inuktitut (Syllabics)", 0, SUNDAY), TZM_TFNG(0x785F, "tzm_tfng", "tzm-Tfng", "Tamazight (Tifinagh)", 0, -1), ZH_HANT(0x7C04, "zh_hant", "zh-Hant", "Chinese (Traditional)", 950, SUNDAY), NB(0x7C14, "nb", "nb", "Norwegian (Bokm\u00E5l)", 1252, -1), SR(0x7C1A, "sr", "sr", "Serbian", 1250, -1), TG_CYRL(0x7C28, "tg_cyrl", "tg-Cyrl", "Tajik (Cyrillic)", 1251, -1), DSB(0x7C2E, "dsb", "dsb", "Lower Sorbian", 1252, -1), SMJ(0x7C3B, "smj", "smj", "Sami (Lule)", 1252, -1), UZ_LATN(0x7C43, "uz_latn", "uz-Latn", "Uzbek (Latin)", 1254, -1), PA_ARAB(0x7C46, "pa_arab", "pa-Arab", "Punjabi (Arabic)", 1256, -1), MN_MONG(0x7C50, "mn_mong", "mn-Mong", "Mongolian (Traditional Mongolian)", 
0, -1), SD_ARAB(0x7C59, "sd_arab", "sd-Arab", "Sindhi (Arabic)", 1256, -1), CHR_CHER(0x7C5C, "chr_cher", "chr-Cher", "Cherokee (Cherokee)", 0, SUNDAY), IU_LATN(0x7C5D, "iu_latn", "iu-Latn", "Inuktitut (Latin)", 1252, SUNDAY), TZM_LATN(0x7C5F, "tzm_latn", "tzm-Latn", "Tamazight (Latin)", 1252, -1), FF_LATN(0x7C67, "ff_latn", "ff-Latn", "Fulah (Latin)", 1252, -1), HA_LATN(0x7C68, "ha_latn", "ha-Latn", "Hausa (Latin)", 1252, -1), KU_ARAB(0x7C92, "ku_arab", "ku-Arab", "Central Kurdish (Arabic)", 1256, -1), INVALID_N(0xF2EE, "invalid_n", "", "", 0, -1), INVALID_O(0xEEEE, "invalid_o", "", "", 0, -1), ; private final int lcid; private final String windowsId; private final String languageTag; private final String description; private final int defaultCodepage; private final int firstWeekday; private static final Map<String, LocaleID> languageTagLookup = Collections.unmodifiableMap( Stream.of(values()).filter(LocaleID::isValid) .collect(Collectors.toMap(LocaleID::getLanguageTag, Function.identity()))); private static final Map<Integer, LocaleID> lcidLookup = Collections.unmodifiableMap( Stream.of(values()).collect(Collectors.toMap(LocaleID::getLcid, Function.identity()))); LocaleID(int lcid, String windowsId, String languageTag, String description, int defaultCodepage, int firstWeekday) { this.lcid = lcid; this.windowsId = windowsId; this.languageTag = languageTag; this.description = description; this.defaultCodepage = defaultCodepage; this.firstWeekday = (firstWeekday == -1) ? 
Calendar.MONDAY : firstWeekday; } public int getLcid() { return lcid; } public String getWindowsId() { return windowsId; } public String getLanguageTag() { return languageTag; } public String getDescription() { return description; } public int getDefaultCodepage() { return defaultCodepage; } public int getFirstWeekday() { return firstWeekday; } private boolean isValid() { return !languageTag.isEmpty(); } /** * Lookup via the Java language tag / locale display name * * @param languageTag the locale display name * @return if found the LocaleId, otherwise {@code null} */ public static LocaleID lookupByLanguageTag(String languageTag) { return languageTagLookup.get(languageTag); } /** * Lookup via the Windows LCID * * @param lcid the language code id (LCID) * @return if found the LocaleId, otherwise {@code null} */ public static LocaleID lookupByLcid(int lcid) { return lcidLookup.get(lcid); } }
googleapis/google-cloud-java
36,664
java-apigee-registry/proto-google-cloud-apigee-registry-v1/src/main/java/com/google/cloud/apigeeregistry/v1/ListApiSpecRevisionsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/apigeeregistry/v1/registry_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.apigeeregistry.v1; /** * * * <pre> * Response message for ListApiSpecRevisionsResponse. * </pre> * * Protobuf type {@code google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse} */ public final class ListApiSpecRevisionsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse) ListApiSpecRevisionsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListApiSpecRevisionsResponse.newBuilder() to construct. 
private ListApiSpecRevisionsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListApiSpecRevisionsResponse() { apiSpecs_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListApiSpecRevisionsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.apigeeregistry.v1.RegistryServiceProto .internal_static_google_cloud_apigeeregistry_v1_ListApiSpecRevisionsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.apigeeregistry.v1.RegistryServiceProto .internal_static_google_cloud_apigeeregistry_v1_ListApiSpecRevisionsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse.class, com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse.Builder.class); } public static final int API_SPECS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.apigeeregistry.v1.ApiSpec> apiSpecs_; /** * * * <pre> * The revisions of the spec. * </pre> * * <code>repeated .google.cloud.apigeeregistry.v1.ApiSpec api_specs = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.apigeeregistry.v1.ApiSpec> getApiSpecsList() { return apiSpecs_; } /** * * * <pre> * The revisions of the spec. * </pre> * * <code>repeated .google.cloud.apigeeregistry.v1.ApiSpec api_specs = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.apigeeregistry.v1.ApiSpecOrBuilder> getApiSpecsOrBuilderList() { return apiSpecs_; } /** * * * <pre> * The revisions of the spec. 
* </pre> * * <code>repeated .google.cloud.apigeeregistry.v1.ApiSpec api_specs = 1;</code> */ @java.lang.Override public int getApiSpecsCount() { return apiSpecs_.size(); } /** * * * <pre> * The revisions of the spec. * </pre> * * <code>repeated .google.cloud.apigeeregistry.v1.ApiSpec api_specs = 1;</code> */ @java.lang.Override public com.google.cloud.apigeeregistry.v1.ApiSpec getApiSpecs(int index) { return apiSpecs_.get(index); } /** * * * <pre> * The revisions of the spec. * </pre> * * <code>repeated .google.cloud.apigeeregistry.v1.ApiSpec api_specs = 1;</code> */ @java.lang.Override public com.google.cloud.apigeeregistry.v1.ApiSpecOrBuilder getApiSpecsOrBuilder(int index) { return apiSpecs_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token that can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token that can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < apiSpecs_.size(); i++) { output.writeMessage(1, apiSpecs_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < apiSpecs_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, apiSpecs_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse)) { return super.equals(obj); } com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse other = (com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse) obj; if (!getApiSpecsList().equals(other.getApiSpecsList())) return false; if 
(!getNextPageToken().equals(other.getNextPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getApiSpecsCount() > 0) { hash = (37 * hash) + API_SPECS_FIELD_NUMBER; hash = (53 * hash) + getApiSpecsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static 
Builder newBuilder( com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for ListApiSpecRevisionsResponse. * </pre> * * Protobuf type {@code google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse) com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.apigeeregistry.v1.RegistryServiceProto .internal_static_google_cloud_apigeeregistry_v1_ListApiSpecRevisionsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.apigeeregistry.v1.RegistryServiceProto .internal_static_google_cloud_apigeeregistry_v1_ListApiSpecRevisionsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse.class, com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse.Builder.class); } // Construct using com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (apiSpecsBuilder_ == null) { apiSpecs_ = java.util.Collections.emptyList(); } 
else { apiSpecs_ = null; apiSpecsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.apigeeregistry.v1.RegistryServiceProto .internal_static_google_cloud_apigeeregistry_v1_ListApiSpecRevisionsResponse_descriptor; } @java.lang.Override public com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse getDefaultInstanceForType() { return com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse build() { com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse buildPartial() { com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse result = new com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse result) { if (apiSpecsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { apiSpecs_ = java.util.Collections.unmodifiableList(apiSpecs_); bitField0_ = (bitField0_ & ~0x00000001); } result.apiSpecs_ = apiSpecs_; } else { result.apiSpecs_ = apiSpecsBuilder_.build(); } } private void buildPartial0( com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( 
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse) { return mergeFrom((com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse other) { if (other == com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse.getDefaultInstance()) return this; if (apiSpecsBuilder_ == null) { if (!other.apiSpecs_.isEmpty()) { if (apiSpecs_.isEmpty()) { apiSpecs_ = other.apiSpecs_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureApiSpecsIsMutable(); apiSpecs_.addAll(other.apiSpecs_); } onChanged(); } } else { if (!other.apiSpecs_.isEmpty()) { if (apiSpecsBuilder_.isEmpty()) { apiSpecsBuilder_.dispose(); apiSpecsBuilder_ = null; apiSpecs_ = other.apiSpecs_; bitField0_ = (bitField0_ & ~0x00000001); apiSpecsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getApiSpecsFieldBuilder() : null; } else { apiSpecsBuilder_.addAllMessages(other.apiSpecs_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.apigeeregistry.v1.ApiSpec m = input.readMessage( com.google.cloud.apigeeregistry.v1.ApiSpec.parser(), extensionRegistry); if (apiSpecsBuilder_ == null) { ensureApiSpecsIsMutable(); apiSpecs_.add(m); } else { apiSpecsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.apigeeregistry.v1.ApiSpec> apiSpecs_ = java.util.Collections.emptyList(); private void ensureApiSpecsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { apiSpecs_ = new java.util.ArrayList<com.google.cloud.apigeeregistry.v1.ApiSpec>(apiSpecs_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.apigeeregistry.v1.ApiSpec, com.google.cloud.apigeeregistry.v1.ApiSpec.Builder, 
com.google.cloud.apigeeregistry.v1.ApiSpecOrBuilder> apiSpecsBuilder_; /** * * * <pre> * The revisions of the spec. * </pre> * * <code>repeated .google.cloud.apigeeregistry.v1.ApiSpec api_specs = 1;</code> */ public java.util.List<com.google.cloud.apigeeregistry.v1.ApiSpec> getApiSpecsList() { if (apiSpecsBuilder_ == null) { return java.util.Collections.unmodifiableList(apiSpecs_); } else { return apiSpecsBuilder_.getMessageList(); } } /** * * * <pre> * The revisions of the spec. * </pre> * * <code>repeated .google.cloud.apigeeregistry.v1.ApiSpec api_specs = 1;</code> */ public int getApiSpecsCount() { if (apiSpecsBuilder_ == null) { return apiSpecs_.size(); } else { return apiSpecsBuilder_.getCount(); } } /** * * * <pre> * The revisions of the spec. * </pre> * * <code>repeated .google.cloud.apigeeregistry.v1.ApiSpec api_specs = 1;</code> */ public com.google.cloud.apigeeregistry.v1.ApiSpec getApiSpecs(int index) { if (apiSpecsBuilder_ == null) { return apiSpecs_.get(index); } else { return apiSpecsBuilder_.getMessage(index); } } /** * * * <pre> * The revisions of the spec. * </pre> * * <code>repeated .google.cloud.apigeeregistry.v1.ApiSpec api_specs = 1;</code> */ public Builder setApiSpecs(int index, com.google.cloud.apigeeregistry.v1.ApiSpec value) { if (apiSpecsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureApiSpecsIsMutable(); apiSpecs_.set(index, value); onChanged(); } else { apiSpecsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The revisions of the spec. * </pre> * * <code>repeated .google.cloud.apigeeregistry.v1.ApiSpec api_specs = 1;</code> */ public Builder setApiSpecs( int index, com.google.cloud.apigeeregistry.v1.ApiSpec.Builder builderForValue) { if (apiSpecsBuilder_ == null) { ensureApiSpecsIsMutable(); apiSpecs_.set(index, builderForValue.build()); onChanged(); } else { apiSpecsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The revisions of the spec. 
* </pre> * * <code>repeated .google.cloud.apigeeregistry.v1.ApiSpec api_specs = 1;</code> */ public Builder addApiSpecs(com.google.cloud.apigeeregistry.v1.ApiSpec value) { if (apiSpecsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureApiSpecsIsMutable(); apiSpecs_.add(value); onChanged(); } else { apiSpecsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The revisions of the spec. * </pre> * * <code>repeated .google.cloud.apigeeregistry.v1.ApiSpec api_specs = 1;</code> */ public Builder addApiSpecs(int index, com.google.cloud.apigeeregistry.v1.ApiSpec value) { if (apiSpecsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureApiSpecsIsMutable(); apiSpecs_.add(index, value); onChanged(); } else { apiSpecsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The revisions of the spec. * </pre> * * <code>repeated .google.cloud.apigeeregistry.v1.ApiSpec api_specs = 1;</code> */ public Builder addApiSpecs(com.google.cloud.apigeeregistry.v1.ApiSpec.Builder builderForValue) { if (apiSpecsBuilder_ == null) { ensureApiSpecsIsMutable(); apiSpecs_.add(builderForValue.build()); onChanged(); } else { apiSpecsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The revisions of the spec. * </pre> * * <code>repeated .google.cloud.apigeeregistry.v1.ApiSpec api_specs = 1;</code> */ public Builder addApiSpecs( int index, com.google.cloud.apigeeregistry.v1.ApiSpec.Builder builderForValue) { if (apiSpecsBuilder_ == null) { ensureApiSpecsIsMutable(); apiSpecs_.add(index, builderForValue.build()); onChanged(); } else { apiSpecsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The revisions of the spec. * </pre> * * <code>repeated .google.cloud.apigeeregistry.v1.ApiSpec api_specs = 1;</code> */ public Builder addAllApiSpecs( java.lang.Iterable<? 
extends com.google.cloud.apigeeregistry.v1.ApiSpec> values) { if (apiSpecsBuilder_ == null) { ensureApiSpecsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, apiSpecs_); onChanged(); } else { apiSpecsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The revisions of the spec. * </pre> * * <code>repeated .google.cloud.apigeeregistry.v1.ApiSpec api_specs = 1;</code> */ public Builder clearApiSpecs() { if (apiSpecsBuilder_ == null) { apiSpecs_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { apiSpecsBuilder_.clear(); } return this; } /** * * * <pre> * The revisions of the spec. * </pre> * * <code>repeated .google.cloud.apigeeregistry.v1.ApiSpec api_specs = 1;</code> */ public Builder removeApiSpecs(int index) { if (apiSpecsBuilder_ == null) { ensureApiSpecsIsMutable(); apiSpecs_.remove(index); onChanged(); } else { apiSpecsBuilder_.remove(index); } return this; } /** * * * <pre> * The revisions of the spec. * </pre> * * <code>repeated .google.cloud.apigeeregistry.v1.ApiSpec api_specs = 1;</code> */ public com.google.cloud.apigeeregistry.v1.ApiSpec.Builder getApiSpecsBuilder(int index) { return getApiSpecsFieldBuilder().getBuilder(index); } /** * * * <pre> * The revisions of the spec. * </pre> * * <code>repeated .google.cloud.apigeeregistry.v1.ApiSpec api_specs = 1;</code> */ public com.google.cloud.apigeeregistry.v1.ApiSpecOrBuilder getApiSpecsOrBuilder(int index) { if (apiSpecsBuilder_ == null) { return apiSpecs_.get(index); } else { return apiSpecsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The revisions of the spec. * </pre> * * <code>repeated .google.cloud.apigeeregistry.v1.ApiSpec api_specs = 1;</code> */ public java.util.List<? 
extends com.google.cloud.apigeeregistry.v1.ApiSpecOrBuilder> getApiSpecsOrBuilderList() { if (apiSpecsBuilder_ != null) { return apiSpecsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(apiSpecs_); } } /** * * * <pre> * The revisions of the spec. * </pre> * * <code>repeated .google.cloud.apigeeregistry.v1.ApiSpec api_specs = 1;</code> */ public com.google.cloud.apigeeregistry.v1.ApiSpec.Builder addApiSpecsBuilder() { return getApiSpecsFieldBuilder() .addBuilder(com.google.cloud.apigeeregistry.v1.ApiSpec.getDefaultInstance()); } /** * * * <pre> * The revisions of the spec. * </pre> * * <code>repeated .google.cloud.apigeeregistry.v1.ApiSpec api_specs = 1;</code> */ public com.google.cloud.apigeeregistry.v1.ApiSpec.Builder addApiSpecsBuilder(int index) { return getApiSpecsFieldBuilder() .addBuilder(index, com.google.cloud.apigeeregistry.v1.ApiSpec.getDefaultInstance()); } /** * * * <pre> * The revisions of the spec. * </pre> * * <code>repeated .google.cloud.apigeeregistry.v1.ApiSpec api_specs = 1;</code> */ public java.util.List<com.google.cloud.apigeeregistry.v1.ApiSpec.Builder> getApiSpecsBuilderList() { return getApiSpecsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.apigeeregistry.v1.ApiSpec, com.google.cloud.apigeeregistry.v1.ApiSpec.Builder, com.google.cloud.apigeeregistry.v1.ApiSpecOrBuilder> getApiSpecsFieldBuilder() { if (apiSpecsBuilder_ == null) { apiSpecsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.apigeeregistry.v1.ApiSpec, com.google.cloud.apigeeregistry.v1.ApiSpec.Builder, com.google.cloud.apigeeregistry.v1.ApiSpecOrBuilder>( apiSpecs_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); apiSpecs_ = null; } return apiSpecsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token that can be sent as `page_token` to retrieve the next page. 
* If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token that can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token that can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token that can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token that can be sent as `page_token` to retrieve the next page. 
* If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse) private static final com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse(); } public static com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListApiSpecRevisionsResponse> PARSER = new com.google.protobuf.AbstractParser<ListApiSpecRevisionsResponse>() { @java.lang.Override public ListApiSpecRevisionsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw 
e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures as InvalidProtocolBufferException, attaching the
            // partially parsed message so callers can inspect what was decoded so far.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  // NOTE(review): everything below is protoc-generated boilerplate (see the
  // "DO NOT EDIT" header of this file); change the .proto and regenerate
  // rather than editing by hand.

  // Singleton parser for ListApiSpecRevisionsResponse messages.
  public static com.google.protobuf.Parser<ListApiSpecRevisionsResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListApiSpecRevisionsResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.apigeeregistry.v1.ListApiSpecRevisionsResponse
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ---------------------------------------------------------------------------
// NOTE(review): the three rows that appeared here were dataset/extraction
// metadata (repository id, file size, file path), not Java source. They mark
// the boundary between two concatenated generated files:
// ListApiSpecRevisionsResponse.java (above) and
// ProvideValidationFeedbackRequest.java (below). Preserved as comments so the
// surrounding Java remains parseable:
//   repo: googleapis/google-cloud-java
//   size: 36,707
//   path: java-maps-addressvalidation/proto-google-maps-addressvalidation-v1/
//         src/main/java/com/google/maps/addressvalidation/v1/ProvideValidationFeedbackRequest.java
// ---------------------------------------------------------------------------
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/maps/addressvalidation/v1/address_validation_service.proto // Protobuf Java Version: 3.25.8 package com.google.maps.addressvalidation.v1; /** * * * <pre> * The request for sending validation feedback. * </pre> * * Protobuf type {@code google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest} */ public final class ProvideValidationFeedbackRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest) ProvideValidationFeedbackRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ProvideValidationFeedbackRequest.newBuilder() to construct. 
private ProvideValidationFeedbackRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ProvideValidationFeedbackRequest() { conclusion_ = 0; responseId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ProvideValidationFeedbackRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.maps.addressvalidation.v1.AddressValidationServiceProto .internal_static_google_maps_addressvalidation_v1_ProvideValidationFeedbackRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.maps.addressvalidation.v1.AddressValidationServiceProto .internal_static_google_maps_addressvalidation_v1_ProvideValidationFeedbackRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest.class, com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest.Builder.class); } /** * * * <pre> * The possible final outcomes of the sequence of address validation requests * needed to validate an address. * </pre> * * Protobuf enum {@code * google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest.ValidationConclusion} */ public enum ValidationConclusion implements com.google.protobuf.ProtocolMessageEnum { /** * * * <pre> * This value is unused. * If the `ProvideValidationFeedbackRequest.conclusion` field is set to * `VALIDATION_CONCLUSION_UNSPECIFIED`, an `INVALID_ARGUMENT` error will be * returned. * </pre> * * <code>VALIDATION_CONCLUSION_UNSPECIFIED = 0;</code> */ VALIDATION_CONCLUSION_UNSPECIFIED(0), /** * * * <pre> * The version of the address returned by the Address Validation API was * used for the transaction. 
* </pre> * * <code>VALIDATED_VERSION_USED = 1;</code> */ VALIDATED_VERSION_USED(1), /** * * * <pre> * The version of the address provided by the user was used for the * transaction * </pre> * * <code>USER_VERSION_USED = 2;</code> */ USER_VERSION_USED(2), /** * * * <pre> * A version of the address that was entered after the last validation * attempt but that was not re-validated was used for the transaction. * </pre> * * <code>UNVALIDATED_VERSION_USED = 3;</code> */ UNVALIDATED_VERSION_USED(3), /** * * * <pre> * The transaction was abandoned and the address was not used. * </pre> * * <code>UNUSED = 4;</code> */ UNUSED(4), UNRECOGNIZED(-1), ; /** * * * <pre> * This value is unused. * If the `ProvideValidationFeedbackRequest.conclusion` field is set to * `VALIDATION_CONCLUSION_UNSPECIFIED`, an `INVALID_ARGUMENT` error will be * returned. * </pre> * * <code>VALIDATION_CONCLUSION_UNSPECIFIED = 0;</code> */ public static final int VALIDATION_CONCLUSION_UNSPECIFIED_VALUE = 0; /** * * * <pre> * The version of the address returned by the Address Validation API was * used for the transaction. * </pre> * * <code>VALIDATED_VERSION_USED = 1;</code> */ public static final int VALIDATED_VERSION_USED_VALUE = 1; /** * * * <pre> * The version of the address provided by the user was used for the * transaction * </pre> * * <code>USER_VERSION_USED = 2;</code> */ public static final int USER_VERSION_USED_VALUE = 2; /** * * * <pre> * A version of the address that was entered after the last validation * attempt but that was not re-validated was used for the transaction. * </pre> * * <code>UNVALIDATED_VERSION_USED = 3;</code> */ public static final int UNVALIDATED_VERSION_USED_VALUE = 3; /** * * * <pre> * The transaction was abandoned and the address was not used. 
* </pre> * * <code>UNUSED = 4;</code> */ public static final int UNUSED_VALUE = 4; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static ValidationConclusion valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. */ public static ValidationConclusion forNumber(int value) { switch (value) { case 0: return VALIDATION_CONCLUSION_UNSPECIFIED; case 1: return VALIDATED_VERSION_USED; case 2: return USER_VERSION_USED; case 3: return UNVALIDATED_VERSION_USED; case 4: return UNUSED; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<ValidationConclusion> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap<ValidationConclusion> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<ValidationConclusion>() { public ValidationConclusion findValueByNumber(int number) { return ValidationConclusion.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest.getDescriptor() .getEnumTypes() .get(0); } private 
static final ValidationConclusion[] VALUES = values(); public static ValidationConclusion valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private ValidationConclusion(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest.ValidationConclusion) } public static final int CONCLUSION_FIELD_NUMBER = 1; private int conclusion_ = 0; /** * * * <pre> * Required. The outcome of the sequence of validation attempts. * * If this field is set to `VALIDATION_CONCLUSION_UNSPECIFIED`, an * `INVALID_ARGUMENT` error will be returned. * </pre> * * <code> * .google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest.ValidationConclusion conclusion = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The enum numeric value on the wire for conclusion. */ @java.lang.Override public int getConclusionValue() { return conclusion_; } /** * * * <pre> * Required. The outcome of the sequence of validation attempts. * * If this field is set to `VALIDATION_CONCLUSION_UNSPECIFIED`, an * `INVALID_ARGUMENT` error will be returned. * </pre> * * <code> * .google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest.ValidationConclusion conclusion = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The conclusion. */ @java.lang.Override public com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest.ValidationConclusion getConclusion() { com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest.ValidationConclusion result = com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest .ValidationConclusion.forNumber(conclusion_); return result == null ? 
com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest.ValidationConclusion .UNRECOGNIZED : result; } public static final int RESPONSE_ID_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object responseId_ = ""; /** * * * <pre> * Required. The ID of the response that this feedback is for. This should be * the * [response_id][google.maps.addressvalidation.v1.ValidateAddressRequest.response_id] * from the first response in a series of address validation attempts. * </pre> * * <code>string response_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The responseId. */ @java.lang.Override public java.lang.String getResponseId() { java.lang.Object ref = responseId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); responseId_ = s; return s; } } /** * * * <pre> * Required. The ID of the response that this feedback is for. This should be * the * [response_id][google.maps.addressvalidation.v1.ValidateAddressRequest.response_id] * from the first response in a series of address validation attempts. * </pre> * * <code>string response_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for responseId. 
*/ @java.lang.Override public com.google.protobuf.ByteString getResponseIdBytes() { java.lang.Object ref = responseId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); responseId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (conclusion_ != com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest .ValidationConclusion.VALIDATION_CONCLUSION_UNSPECIFIED .getNumber()) { output.writeEnum(1, conclusion_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(responseId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, responseId_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (conclusion_ != com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest .ValidationConclusion.VALIDATION_CONCLUSION_UNSPECIFIED .getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, conclusion_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(responseId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, responseId_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest)) { return super.equals(obj); } 
com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest other = (com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest) obj; if (conclusion_ != other.conclusion_) return false; if (!getResponseId().equals(other.getResponseId())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + CONCLUSION_FIELD_NUMBER; hash = (53 * hash) + conclusion_; hash = (37 * hash) + RESPONSE_ID_FIELD_NUMBER; hash = (53 * hash) + getResponseId().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { 
return PARSER.parseFrom(data); } public static com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The request for sending validation feedback. * </pre> * * Protobuf type {@code google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest) com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.maps.addressvalidation.v1.AddressValidationServiceProto .internal_static_google_maps_addressvalidation_v1_ProvideValidationFeedbackRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.maps.addressvalidation.v1.AddressValidationServiceProto .internal_static_google_maps_addressvalidation_v1_ProvideValidationFeedbackRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest.class, com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest.Builder.class); } // Construct using // 
com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; conclusion_ = 0; responseId_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.maps.addressvalidation.v1.AddressValidationServiceProto .internal_static_google_maps_addressvalidation_v1_ProvideValidationFeedbackRequest_descriptor; } @java.lang.Override public com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest getDefaultInstanceForType() { return com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest .getDefaultInstance(); } @java.lang.Override public com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest build() { com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest buildPartial() { com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest result = new com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.conclusion_ = conclusion_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.responseId_ = responseId_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return 
super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest) { return mergeFrom( (com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest other) { if (other == com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest .getDefaultInstance()) return this; if (other.conclusion_ != 0) { setConclusionValue(other.getConclusionValue()); } if (!other.getResponseId().isEmpty()) { responseId_ = other.responseId_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { conclusion_ = input.readEnum(); 
bitField0_ |= 0x00000001; break; } // case 8 case 18: { responseId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private int conclusion_ = 0; /** * * * <pre> * Required. The outcome of the sequence of validation attempts. * * If this field is set to `VALIDATION_CONCLUSION_UNSPECIFIED`, an * `INVALID_ARGUMENT` error will be returned. * </pre> * * <code> * .google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest.ValidationConclusion conclusion = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The enum numeric value on the wire for conclusion. */ @java.lang.Override public int getConclusionValue() { return conclusion_; } /** * * * <pre> * Required. The outcome of the sequence of validation attempts. * * If this field is set to `VALIDATION_CONCLUSION_UNSPECIFIED`, an * `INVALID_ARGUMENT` error will be returned. * </pre> * * <code> * .google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest.ValidationConclusion conclusion = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @param value The enum numeric value on the wire for conclusion to set. * @return This builder for chaining. */ public Builder setConclusionValue(int value) { conclusion_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The outcome of the sequence of validation attempts. * * If this field is set to `VALIDATION_CONCLUSION_UNSPECIFIED`, an * `INVALID_ARGUMENT` error will be returned. 
* </pre> * * <code> * .google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest.ValidationConclusion conclusion = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The conclusion. */ @java.lang.Override public com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest .ValidationConclusion getConclusion() { com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest.ValidationConclusion result = com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest .ValidationConclusion.forNumber(conclusion_); return result == null ? com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest .ValidationConclusion.UNRECOGNIZED : result; } /** * * * <pre> * Required. The outcome of the sequence of validation attempts. * * If this field is set to `VALIDATION_CONCLUSION_UNSPECIFIED`, an * `INVALID_ARGUMENT` error will be returned. * </pre> * * <code> * .google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest.ValidationConclusion conclusion = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @param value The conclusion to set. * @return This builder for chaining. */ public Builder setConclusion( com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest.ValidationConclusion value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; conclusion_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * Required. The outcome of the sequence of validation attempts. * * If this field is set to `VALIDATION_CONCLUSION_UNSPECIFIED`, an * `INVALID_ARGUMENT` error will be returned. * </pre> * * <code> * .google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest.ValidationConclusion conclusion = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return This builder for chaining. 
*/ public Builder clearConclusion() { bitField0_ = (bitField0_ & ~0x00000001); conclusion_ = 0; onChanged(); return this; } private java.lang.Object responseId_ = ""; /** * * * <pre> * Required. The ID of the response that this feedback is for. This should be * the * [response_id][google.maps.addressvalidation.v1.ValidateAddressRequest.response_id] * from the first response in a series of address validation attempts. * </pre> * * <code>string response_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The responseId. */ public java.lang.String getResponseId() { java.lang.Object ref = responseId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); responseId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The ID of the response that this feedback is for. This should be * the * [response_id][google.maps.addressvalidation.v1.ValidateAddressRequest.response_id] * from the first response in a series of address validation attempts. * </pre> * * <code>string response_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for responseId. */ public com.google.protobuf.ByteString getResponseIdBytes() { java.lang.Object ref = responseId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); responseId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The ID of the response that this feedback is for. This should be * the * [response_id][google.maps.addressvalidation.v1.ValidateAddressRequest.response_id] * from the first response in a series of address validation attempts. * </pre> * * <code>string response_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The responseId to set. * @return This builder for chaining. 
*/ public Builder setResponseId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } responseId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The ID of the response that this feedback is for. This should be * the * [response_id][google.maps.addressvalidation.v1.ValidateAddressRequest.response_id] * from the first response in a series of address validation attempts. * </pre> * * <code>string response_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearResponseId() { responseId_ = getDefaultInstance().getResponseId(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Required. The ID of the response that this feedback is for. This should be * the * [response_id][google.maps.addressvalidation.v1.ValidateAddressRequest.response_id] * from the first response in a series of address validation attempts. * </pre> * * <code>string response_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for responseId to set. * @return This builder for chaining. 
*/ public Builder setResponseIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); responseId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest) } // @@protoc_insertion_point(class_scope:google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest) private static final com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest(); } public static com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ProvideValidationFeedbackRequest> PARSER = new com.google.protobuf.AbstractParser<ProvideValidationFeedbackRequest>() { @java.lang.Override public ProvideValidationFeedbackRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new 
com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ProvideValidationFeedbackRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ProvideValidationFeedbackRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.maps.addressvalidation.v1.ProvideValidationFeedbackRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/myfaces
36,436
impl/src/test/java/org/apache/myfaces/view/facelets/tag/ui/RepeatTestCase.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.myfaces.view.facelets.tag.ui; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import jakarta.el.ELContext; import jakarta.el.ELException; import jakarta.el.ValueExpression; import jakarta.faces.FacesException; import jakarta.faces.component.ContextCallback; import jakarta.faces.component.UIComponent; import jakarta.faces.component.UIViewRoot; import jakarta.faces.component.visit.VisitCallback; import jakarta.faces.component.visit.VisitContext; import jakarta.faces.component.visit.VisitResult; import jakarta.faces.context.FacesContext; import jakarta.faces.context.ResponseWriter; import org.apache.myfaces.view.facelets.AbstractFaceletTestCase; import org.apache.myfaces.view.facelets.bean.Company; import org.apache.myfaces.view.facelets.bean.Example; import org.apache.myfaces.view.facelets.component.UIRepeat; import org.apache.myfaces.util.lang.FastWriter; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; /** * Tests for UIRepeat. 
* * @author Jakob Korherr (latest modification by $Author$) * @version $Revision$ $Date$ */ public class RepeatTestCase extends AbstractFaceletTestCase { @Test public void testRepeat() throws Exception { Company c = Example.createCompany(); facesContext.getExternalContext().getRequestMap().put("company", c); UIViewRoot root = facesContext.getViewRoot(); vdl.buildView(facesContext, root, "repeat.xml"); FastWriter fw = new FastWriter(); ResponseWriter rw = facesContext.getResponseWriter(); rw = rw.cloneWithWriter(fw); facesContext.setResponseWriter(rw); root.encodeAll(facesContext); String content = fw.toString(); int hrIndex = content.indexOf("<dt>HR</dt>"); Assertions.assertNotSame(-1, hrIndex); int rdIndex = content.indexOf("<dt>RD</dt>", hrIndex); Assertions.assertNotSame(-1, rdIndex); int empIndex1 = content.indexOf( "<dd class=\"3\">Ellen, Sue</dd><dd class=\"4\">Scooner, Mary</dd>", hrIndex); Assertions.assertNotSame(-1, empIndex1); int empIndex2 = content.indexOf( "<dd class=\"6\">Burns, Ed</dd><dd class=\"7\">Lubke, Ryan</dd><dd class=\"8\">Kitain, Roger</dd>", rdIndex); Assertions.assertNotSame(-1, empIndex2); int hrIndex2 = content.indexOf("<li class=\"HR\">HR</li>"); Assertions.assertNotSame(-1, hrIndex2); int rdIndex2 = content.indexOf("<li class=\"RD\">RD</li>", hrIndex2); Assertions.assertNotSame(-1, rdIndex2); } /** * Tests UIRepeat.invokeOnComponent() including var and varStatus properties. 
* @throws IOException */ @Test @SuppressWarnings("unchecked") public void testInvokeOnComponent() throws IOException { // put the values for ui:repeat on the request map final String[] repeatValues = new String[]{ "a", "b", "c" }; externalContext.getRequestMap().put("repeatValues", repeatValues); // build testUIRepeat.xhtml UIViewRoot root = facesContext.getViewRoot(); vdl.buildView(facesContext, root, "testUIRepeat.xhtml"); // get the component instances UIRepeat repeat = (UIRepeat) root.findComponent("form:repeat"); UIComponent outputText = repeat.getChildren().get(0); // create the ContextCallback TestContextCallback callback = new TestContextCallback(facesContext); // save some values in #{row} and #{status} and test the // automatic saving and restoring of them final String var = "row"; final String varStatus = "status"; final String varValue = "someVarValue"; final String statusValue = "someStatusValue"; externalContext.getRequestMap().put(var, varValue); externalContext.getRequestMap().put(varStatus, statusValue); // invokeOnComponent on UIRepeat itself String invokeId = "form:repeat"; Assertions.assertTrue(root.invokeOnComponent(facesContext, invokeId, callback)); Assertions.assertEquals(repeat, callback._lastTarget); Assertions.assertEquals(varValue, callback._rowValue); // previous set varValue Assertions.assertEquals(statusValue, callback._repeatStatus); // previous set statusValue // invokeOnComponent on a child of UIRepeat in the first row invokeId = "form:repeat:0:outputText"; Assertions.assertTrue(root.invokeOnComponent(facesContext, invokeId, callback)); Assertions.assertEquals(outputText, callback._lastTarget); Assertions.assertEquals(repeatValues[0], callback._rowValue); Assertions.assertEquals(0, callback._index); Assertions.assertEquals(true, callback._first); Assertions.assertEquals(false, callback._last); Assertions.assertEquals(true, callback._even); // invokeOnComponent on a child of UIRepeat in the second row invokeId = 
"form:repeat:1:outputText"; Assertions.assertTrue(root.invokeOnComponent(facesContext, invokeId, callback)); Assertions.assertEquals(outputText, callback._lastTarget); Assertions.assertEquals(repeatValues[1], callback._rowValue); Assertions.assertEquals(1, callback._index); Assertions.assertEquals(false, callback._first); Assertions.assertEquals(false, callback._last); Assertions.assertEquals(false, callback._even); // invokeOnComponent on a child of UIRepeat in the third row invokeId = "form:repeat:2:outputText"; Assertions.assertTrue(root.invokeOnComponent(facesContext, invokeId, callback)); Assertions.assertEquals(outputText, callback._lastTarget); Assertions.assertEquals(repeatValues[2], callback._rowValue); Assertions.assertEquals(2, callback._index); Assertions.assertEquals(false, callback._first); Assertions.assertEquals(true, callback._last); Assertions.assertEquals(true, callback._even); // invokeOnComponent on a child of UIRepeat with invalid row (-1) invokeId = "form:repeat:outputText"; Assertions.assertTrue(root.invokeOnComponent(facesContext, invokeId, callback)); // after all these calls to invokeOnComponent, row and status still // have to be the same like before Assertions.assertEquals(varValue, externalContext.getRequestMap().get(var)); Assertions.assertEquals(statusValue, externalContext.getRequestMap().get(varStatus)); // remove the values from the request map externalContext.getRequestMap().remove("repeatValues"); externalContext.getRequestMap().remove(var); externalContext.getRequestMap().remove(varStatus); } /** * ContextCallback to test invokeOnComponent() including var and varStatus properties. 
* @author Jakob Korherr */ private static class TestContextCallback implements ContextCallback { private UIComponent _lastTarget; private Object _rowValue; private Object _repeatStatus; private Object _index; private Object _first, _last, _even; private ValueExpression _rowValueExpression; private ValueExpression _statusValueExpression; private ValueExpression _indexValueExpression; private ValueExpression _firstValueExpression; private ValueExpression _lastValueExpression; private ValueExpression _evenValueExpression; public TestContextCallback(FacesContext context) { _rowValueExpression = context.getApplication().getExpressionFactory() .createValueExpression(context.getELContext(), "#{row}", Object.class); _statusValueExpression = context.getApplication().getExpressionFactory() .createValueExpression(context.getELContext(), "#{status}", Object.class); _indexValueExpression = context.getApplication().getExpressionFactory() .createValueExpression(context.getELContext(), "#{status.index}", Object.class); _firstValueExpression = context.getApplication().getExpressionFactory() .createValueExpression(context.getELContext(), "#{status.first}", Object.class); _lastValueExpression = context.getApplication().getExpressionFactory() .createValueExpression(context.getELContext(), "#{status.last}", Object.class); _evenValueExpression = context.getApplication().getExpressionFactory() .createValueExpression(context.getELContext(), "#{status.even}", Object.class); } public void invokeContextCallback(FacesContext context, UIComponent target) { _lastTarget = target; // evaluate ValueExpressions ELContext elCtx = context.getELContext(); _rowValue = _rowValueExpression.getValue(elCtx); _repeatStatus = _statusValueExpression.getValue(elCtx); try { _index = _indexValueExpression.getValue(elCtx); _first = _firstValueExpression.getValue(elCtx); _last = _lastValueExpression.getValue(elCtx); _even = _evenValueExpression.getValue(elCtx); } catch (ELException ele) { // repeatStatus is some 
other object, so these values are all null _index = _first = _last = _even = null; } } } /** * Tests UIRepeat.visitTree(). * @throws IOException */ @Test @SuppressWarnings("unchecked") public void testVisitTree() throws IOException { // put the values for ui:repeat on the request map final String[] repeatValues = new String[]{ "a", "b", "c" }; externalContext.getRequestMap().put("repeatValues", repeatValues); // build testUIRepeat.xhtml UIViewRoot root = facesContext.getViewRoot(); vdl.buildView(facesContext, root, "testUIRepeat.xhtml"); // get the component instances UIRepeat repeat = (UIRepeat) root.findComponent("form:repeat"); // create the VisitCallback TestVisitCallback testVisitCallback = new TestVisitCallback(facesContext, repeatValues); // save some values in #{row} and #{status} and test the // automatic saving and restoring of them final String var = "row"; final String varStatus = "status"; final String varValue = "someVarValue"; final String statusValue = "someStatusValue"; externalContext.getRequestMap().put(var, varValue); externalContext.getRequestMap().put(varStatus, statusValue); // perform visit repeat.visitTree(VisitContext.createVisitContext(facesContext), testVisitCallback); // created expected List List<String> expectedClientIds = new ArrayList<String>(); expectedClientIds.add("form:repeat"); expectedClientIds.add("form:repeat:0:outputText"); expectedClientIds.add("form:repeat:1:outputText"); expectedClientIds.add("form:repeat:2:outputText"); // see if we got the expected result Assertions.assertEquals(expectedClientIds, testVisitCallback._visitedClientIds); // after the tree visit, row and status still // have to be the same like before Assertions.assertEquals(varValue, externalContext.getRequestMap().get(var)); Assertions.assertEquals(statusValue, externalContext.getRequestMap().get(varStatus)); // remove the values from the request map externalContext.getRequestMap().remove("repeatValues"); externalContext.getRequestMap().remove(var); 
externalContext.getRequestMap().remove(varStatus); } /** * VisitCallback to test visitTree(). * @author Jakob Korherr */ private static class TestVisitCallback implements VisitCallback { private List<String> _visitedClientIds; private ValueExpression _rowValueExpression; private ValueExpression _indexValueExpression; private String[] _repeatValues; public TestVisitCallback(FacesContext context, String[] repeatValues) { _repeatValues = repeatValues; _visitedClientIds = new ArrayList<String>(); _rowValueExpression = context.getApplication().getExpressionFactory() .createValueExpression(context.getELContext(), "#{row}", Object.class); _indexValueExpression = context.getApplication().getExpressionFactory() .createValueExpression(context.getELContext(), "#{status.index}", Object.class); } public VisitResult visit(VisitContext context, UIComponent target) { final String clientId = target.getClientId(context.getFacesContext()); if (_visitedClientIds.contains(clientId)) { Assertions.fail("Component with clientId " + clientId + " visited twice!"); } else { _visitedClientIds.add(clientId); if (!(target instanceof UIRepeat)) { // test #{row} and #{status.index} ELContext elCtx = context.getFacesContext().getELContext(); Object indexObject = _indexValueExpression.getValue(elCtx); // indexObject has to be an Integer Assertions.assertTrue(indexObject instanceof Integer); Integer index = (Integer) indexObject; // the index has to be part of the clientId Assertions.assertTrue(clientId.contains("" + index)); Object rowValue = _rowValueExpression.getValue(elCtx); // #{row} has to be the repeatValue for the current index Assertions.assertEquals(_repeatValues[index], rowValue); } } return VisitResult.ACCEPT; } } @Test public void testRepeatOffset() throws Exception { final String[] repeatValues = new String[] {"B1", "B2", "B3", "B4", "B5", "B6", "B7"}; facesContext.getExternalContext().getRequestMap().put("repeatValues", repeatValues); UIViewRoot root = facesContext.getViewRoot(); 
vdl.buildView(facesContext, root, "ui_repeat_offset.xhtml"); UIRepeat repeat = (UIRepeat) root.findComponent("form:repeat"); Assertions.assertNotNull(repeat); FastWriter fw = new FastWriter(); ResponseWriter rw = facesContext.getResponseWriter(); rw = rw.cloneWithWriter(fw); facesContext.setResponseWriter(rw); repeat.encodeAll(facesContext); String content = fw.toString(); // offset="2" size="1" should render only 1 row int itemIndex1 = content.indexOf("B1"); Assertions.assertEquals(-1, itemIndex1); int itemIndex2 = content.indexOf("B2"); Assertions.assertEquals(-1, itemIndex2); String item1 = "B3"; int itemIndex3 = content.indexOf(item1); Assertions.assertNotSame(-1, itemIndex3); String item2 = "B4"; // the second item should not be there Assertions.assertEquals(-1, content.indexOf(item2, itemIndex1+2)); } @Test public void testRepeatOffset_0() throws Exception { final String[] repeatValues = new String[] {"B1", "B2", "B3", "B4", "B5", "B6", "B7"}; facesContext.getExternalContext().getRequestMap().put("repeatValues", repeatValues); UIViewRoot root = facesContext.getViewRoot(); vdl.buildView(facesContext, root, "ui_repeat_offset.xhtml"); UIRepeat repeat = (UIRepeat) root.findComponent("form:repeat0"); Assertions.assertNotNull(repeat); FastWriter fw = new FastWriter(); ResponseWriter rw = facesContext.getResponseWriter(); rw = rw.cloneWithWriter(fw); facesContext.setResponseWriter(rw); repeat.encodeAll(facesContext); String content = fw.toString(); int itemIndex1 = content.indexOf("B1"); Assertions.assertNotSame(-1, itemIndex1); int itemIndex2 = content.indexOf("B2"); Assertions.assertNotSame(-1, itemIndex2); int itemIndex3 = content.indexOf("B3"); Assertions.assertEquals(-1, itemIndex3); } @Test public void testRepeatOffset_0_7() throws Exception { final String[] repeatValues = new String[] {"B1", "B2", "B3", "B4", "B5", "B6", "B7"}; facesContext.getExternalContext().getRequestMap().put("repeatValues", repeatValues); UIViewRoot root = facesContext.getViewRoot(); 
vdl.buildView(facesContext, root, "ui_repeat_offset.xhtml"); UIRepeat repeat = (UIRepeat) root.findComponent("form:repeat0_7"); Assertions.assertNotNull(repeat); FastWriter fw = new FastWriter(); ResponseWriter rw = facesContext.getResponseWriter(); rw = rw.cloneWithWriter(fw); facesContext.setResponseWriter(rw); repeat.encodeAll(facesContext); String content = fw.toString(); int itemIndex1 = content.indexOf("B1"); Assertions.assertNotSame(-1, itemIndex1); int itemIndex2 = content.indexOf("B2", itemIndex1); Assertions.assertNotSame(-1, itemIndex2); int itemIndex3 = content.indexOf("B3", itemIndex2); Assertions.assertNotSame(-1, itemIndex3); int itemIndex4 = content.indexOf("B4", itemIndex3); Assertions.assertNotSame(-1, itemIndex4); int itemIndex5 = content.indexOf("B5", itemIndex4); Assertions.assertNotSame(-1, itemIndex5); int itemIndex6 = content.indexOf("B6", itemIndex5); Assertions.assertNotSame(-1, itemIndex6); int itemIndex7 = content.indexOf("B7", itemIndex6); Assertions.assertNotSame(-1, itemIndex7); } @Test public void testRepeatOffset_0_8() throws Exception { final String[] repeatValues = new String[] {"B1", "B2", "B3", "B4", "B5", "B6", "B7"}; facesContext.getExternalContext().getRequestMap().put("repeatValues", repeatValues); UIViewRoot root = facesContext.getViewRoot(); vdl.buildView(facesContext, root, "ui_repeat_offset.xhtml"); UIRepeat repeat = (UIRepeat) root.findComponent("form:repeat0_8"); Assertions.assertNotNull(repeat); FastWriter fw = new FastWriter(); ResponseWriter rw = facesContext.getResponseWriter(); rw = rw.cloneWithWriter(fw); facesContext.setResponseWriter(rw); try { repeat.encodeAll(facesContext); Assertions.fail(); } catch(FacesException e) { // size cannot be greater than collection size } } @Test public void testRepeatOffset_1() throws Exception { final String[] repeatValues = new String[] {"B1", "B2", "B3", "B4", "B5", "B6", "B7"}; facesContext.getExternalContext().getRequestMap().put("repeatValues", repeatValues); UIViewRoot 
root = facesContext.getViewRoot(); vdl.buildView(facesContext, root, "ui_repeat_offset.xhtml"); UIRepeat repeat = (UIRepeat) root.findComponent("form:repeat1"); Assertions.assertNotNull(repeat); FastWriter fw = new FastWriter(); ResponseWriter rw = facesContext.getResponseWriter(); rw = rw.cloneWithWriter(fw); facesContext.setResponseWriter(rw); repeat.encodeAll(facesContext); String content = fw.toString(); int itemIndex1 = content.indexOf("B1"); Assertions.assertEquals(-1, itemIndex1); int itemIndex2 = content.indexOf("B2"); Assertions.assertNotSame(-1, itemIndex2); int itemIndex3 = content.indexOf("B3", itemIndex2); Assertions.assertNotSame(-1, itemIndex3); int itemIndex4 = content.indexOf("B4", itemIndex3); Assertions.assertNotSame(-1, itemIndex4); int itemIndex5 = content.indexOf("B5", itemIndex4); Assertions.assertEquals(-1, itemIndex5); } @Test public void testRepeatOffset_1_7() throws Exception { final String[] repeatValues = new String[] {"B1", "B2", "B3", "B4", "B5", "B6", "B7"}; facesContext.getExternalContext().getRequestMap().put("repeatValues", repeatValues); UIViewRoot root = facesContext.getViewRoot(); vdl.buildView(facesContext, root, "ui_repeat_offset.xhtml"); UIRepeat repeat = (UIRepeat) root.findComponent("form:repeat1_7"); Assertions.assertNotNull(repeat); FastWriter fw = new FastWriter(); ResponseWriter rw = facesContext.getResponseWriter(); rw = rw.cloneWithWriter(fw); facesContext.setResponseWriter(rw); repeat.encodeAll(facesContext); String content = fw.toString(); int itemIndex1 = content.indexOf("B1"); Assertions.assertEquals(-1, itemIndex1); int itemIndex2 = content.indexOf("B2"); Assertions.assertNotSame(-1, itemIndex2); int itemIndex3 = content.indexOf("B3", itemIndex2); Assertions.assertNotSame(-1, itemIndex3); int itemIndex4 = content.indexOf("B4", itemIndex3); Assertions.assertNotSame(-1, itemIndex4); int itemIndex5 = content.indexOf("B5", itemIndex4); Assertions.assertNotSame(-1, itemIndex5); int itemIndex6 = content.indexOf("B6", 
itemIndex5); Assertions.assertNotSame(-1, itemIndex6); int itemIndex7 = content.indexOf("B7", itemIndex6); Assertions.assertNotSame(-1, itemIndex7); } @Test public void testRepeatOffset_1_8() throws Exception { final String[] repeatValues = new String[] {"B1", "B2", "B3", "B4", "B5", "B6", "B7"}; facesContext.getExternalContext().getRequestMap().put("repeatValues", repeatValues); UIViewRoot root = facesContext.getViewRoot(); vdl.buildView(facesContext, root, "ui_repeat_offset.xhtml"); UIRepeat repeat = (UIRepeat) root.findComponent("form:repeat1_8"); Assertions.assertNotNull(repeat); FastWriter fw = new FastWriter(); ResponseWriter rw = facesContext.getResponseWriter(); rw = rw.cloneWithWriter(fw); facesContext.setResponseWriter(rw); try { repeat.encodeAll(facesContext); Assertions.fail(); } catch(FacesException e) { // size cannot be greater than collection size } } @Test public void testRepeatOffset2() throws Exception { final String[] repeatValues = new String[] {"B1", "B2", "B3", "B4", "B5", "B6", "B7"}; facesContext.getExternalContext().getRequestMap().put("repeatValues", repeatValues); UIViewRoot root = facesContext.getViewRoot(); vdl.buildView(facesContext, root, "ui_repeat_offset2.xhtml"); UIRepeat repeat = (UIRepeat) root.findComponent("form:repeat"); Assertions.assertNotNull(repeat); FastWriter fw = new FastWriter(); ResponseWriter rw = facesContext.getResponseWriter(); rw = rw.cloneWithWriter(fw); facesContext.setResponseWriter(rw); repeat.encodeAll(facesContext); String content = fw.toString(); int itemIndex1 = content.indexOf("B1"); Assertions.assertEquals(-1, itemIndex1); int itemIndex2 = content.indexOf("B2"); Assertions.assertEquals(-1, itemIndex2); int itemIndex3 = content.indexOf("B3"); Assertions.assertNotSame(-1, itemIndex3); int itemIndex4 = content.indexOf("B4", itemIndex3); Assertions.assertNotSame(-1, itemIndex4); int itemIndex5 = content.indexOf("B5", itemIndex4); Assertions.assertNotSame(-1, itemIndex5); int itemIndex6 = 
content.indexOf("B6", itemIndex5); Assertions.assertNotSame(-1, itemIndex6); int itemIndex7 = content.indexOf("B7", itemIndex6); Assertions.assertNotSame(-1, itemIndex7); //System.out.println(fw); } @Test @SuppressWarnings("unchecked") public void testInvokeOnComponentBeginEnd() throws IOException { UIViewRoot root = facesContext.getViewRoot(); vdl.buildView(facesContext, root, "testUIRepeatBeginEnd.xhtml"); UIRepeat repeat = (UIRepeat) root.findComponent("form:repeat"); Assertions.assertNotNull(repeat); FastWriter fw = new FastWriter(); ResponseWriter rw = facesContext.getResponseWriter(); rw = rw.cloneWithWriter(fw); facesContext.setResponseWriter(rw); repeat.encodeAll(facesContext); String content = fw.toString(); Assertions.assertTrue(content.contains("Hello 1")); Assertions.assertTrue(content.contains("Hello 2")); Assertions.assertTrue(content.contains("Hello 3")); Assertions.assertFalse(content.contains("Hello 0")); Assertions.assertFalse(content.contains("Hello 4")); } @Test @SuppressWarnings("unchecked") public void testInvokeOnNullModel() throws IOException { final List<String> modelValues = null; final List<String> loadedModelValues = Arrays.asList("Claire", "Michael"); facesContext.getExternalContext().getRequestMap().put("modelValues", modelValues); facesContext.getExternalContext().getRequestMap().put("loadedModelValues", loadedModelValues); UIViewRoot root = facesContext.getViewRoot(); vdl.buildView(facesContext, root, "testUIRepeatEmpty.xhtml"); UIRepeat repeat = (UIRepeat) root.findComponent("form:repeat"); Assertions.assertNotNull(repeat); FastWriter fw = new FastWriter(); ResponseWriter rw = facesContext.getResponseWriter(); rw = rw.cloneWithWriter(fw); facesContext.setResponseWriter(rw); repeat.encodeAll(facesContext); String content = fw.toString(); Assertions.assertFalse(content.contains("Hello ")); } @Test @SuppressWarnings("unchecked") public void testInvokeOnEmptyList() throws IOException { final List<String> modelValues = 
Collections.emptyList(); final List<String> loadedModelValues = Arrays.asList("Claire", "Michael"); facesContext.getExternalContext().getRequestMap().put("modelValues", modelValues); facesContext.getExternalContext().getRequestMap().put("loadedModelValues", loadedModelValues); UIViewRoot root = facesContext.getViewRoot(); vdl.buildView(facesContext, root, "testUIRepeatEmpty.xhtml"); UIRepeat repeat = (UIRepeat) root.findComponent("form:repeat"); Assertions.assertNotNull(repeat); FastWriter fw = new FastWriter(); ResponseWriter rw = facesContext.getResponseWriter(); rw = rw.cloneWithWriter(fw); facesContext.setResponseWriter(rw); repeat.encodeAll(facesContext); String content = fw.toString(); Assertions.assertFalse(content.contains("Hello ")); } @Test @SuppressWarnings("unchecked") public void testInvokeModelStep_1() throws IOException { final List<String> values = Arrays.asList("User #0", "User #1", "User #2", "User #3", "User #4", "User #5", "User #6", "User #7", "User #8", "User #9", "User #10"); IterationBean iterationBean = new IterationBean(1, 7, 1, values); facesContext.getExternalContext().getRequestMap().put("iterationBean", iterationBean); UIViewRoot root = facesContext.getViewRoot(); vdl.buildView(facesContext, root, "ui_repeat_model_step.xhtml"); UIRepeat repeat = (UIRepeat) root.findComponent("form:repeat"); Assertions.assertNotNull(repeat); FastWriter fw = new FastWriter(); ResponseWriter rw = facesContext.getResponseWriter(); rw = rw.cloneWithWriter(fw); facesContext.setResponseWriter(rw); repeat.encodeAll(facesContext); String content = fw.toString(); Assertions.assertTrue(content.contains("User #1")); Assertions.assertTrue(content.contains("User #2")); Assertions.assertTrue(content.contains("User #3")); Assertions.assertTrue(content.contains("User #4")); Assertions.assertTrue(content.contains("User #5")); Assertions.assertTrue(content.contains("User #6")); Assertions.assertTrue(content.contains("User #7")); 
Assertions.assertFalse(content.contains("User #0")); Assertions.assertFalse(content.contains("User #8")); Assertions.assertFalse(content.contains("User #9")); Assertions.assertFalse(content.contains("User #10")); } @Test @SuppressWarnings("unchecked") public void testInvokeModelStep_2() throws IOException { final List<String> values = Arrays.asList("User #0", "User #1", "User #2", "User #3", "User #4", "User #5", "User #6", "User #7", "User #8", "User #9", "User #10"); IterationBean iterationBean = new IterationBean(1, 7, 2, values); facesContext.getExternalContext().getRequestMap().put("iterationBean", iterationBean); UIViewRoot root = facesContext.getViewRoot(); vdl.buildView(facesContext, root, "ui_repeat_model_step.xhtml"); UIRepeat repeat = (UIRepeat) root.findComponent("form:repeat"); Assertions.assertNotNull(repeat); FastWriter fw = new FastWriter(); ResponseWriter rw = facesContext.getResponseWriter(); rw = rw.cloneWithWriter(fw); facesContext.setResponseWriter(rw); repeat.encodeAll(facesContext); String content = fw.toString(); Assertions.assertTrue(content.contains("User #1")); Assertions.assertTrue(content.contains("User #3")); Assertions.assertTrue(content.contains("User #5")); Assertions.assertTrue(content.contains("User #7")); Assertions.assertFalse(content.contains("User #0")); Assertions.assertFalse(content.contains("User #2")); Assertions.assertFalse(content.contains("User #4")); Assertions.assertFalse(content.contains("User #6")); Assertions.assertFalse(content.contains("User #8")); Assertions.assertFalse(content.contains("User #9")); Assertions.assertFalse(content.contains("User #10")); } @Test @SuppressWarnings("unchecked") public void testInvokeModelStep_3() throws IOException { final List<String> values = Arrays.asList("User #0", "User #1", "User #2", "User #3", "User #4", "User #5", "User #6", "User #7", "User #8", "User #9", "User #10"); IterationBean iterationBean = new IterationBean(2, 7, 3, values); 
facesContext.getExternalContext().getRequestMap().put("iterationBean", iterationBean); UIViewRoot root = facesContext.getViewRoot(); vdl.buildView(facesContext, root, "ui_repeat_model_step.xhtml"); UIRepeat repeat = (UIRepeat) root.findComponent("form:repeat"); Assertions.assertNotNull(repeat); FastWriter fw = new FastWriter(); ResponseWriter rw = facesContext.getResponseWriter(); rw = rw.cloneWithWriter(fw); facesContext.setResponseWriter(rw); repeat.encodeAll(facesContext); String content = fw.toString(); Assertions.assertTrue(content.contains("User #2")); Assertions.assertTrue(content.contains("User #5")); Assertions.assertFalse(content.contains("User #0")); Assertions.assertFalse(content.contains("User #1")); Assertions.assertFalse(content.contains("User #3")); Assertions.assertFalse(content.contains("User #4")); Assertions.assertFalse(content.contains("User #6")); Assertions.assertFalse(content.contains("User #7")); Assertions.assertFalse(content.contains("User #8")); Assertions.assertFalse(content.contains("User #9")); Assertions.assertFalse(content.contains("User #10")); } @Test @SuppressWarnings("unchecked") public void testInvokeModelChangeStep_3() throws IOException { final List<String> values = Arrays.asList("User #0", "User #1", "User #2", "User #3", "User #4", "User #5", "User #6", "User #7", "User #8", "User #9", "User #10"); IterationBean iterationBean = new IterationBean(2, 7, 3, values); facesContext.getExternalContext().getRequestMap().put("iterationBean", iterationBean); UIViewRoot root = facesContext.getViewRoot(); vdl.buildView(facesContext, root, "ui_repeat_model_step.xhtml"); UIRepeat repeat = (UIRepeat) root.findComponent("form:repeat"); Assertions.assertNotNull(repeat); FastWriter fw = new FastWriter(); ResponseWriter rw = facesContext.getResponseWriter(); rw = rw.cloneWithWriter(fw); facesContext.setResponseWriter(rw); repeat.encodeAll(facesContext); String content = fw.toString(); Assertions.assertTrue(content.contains("User #2")); 
Assertions.assertTrue(content.contains("User #5")); Assertions.assertFalse(content.contains("User #0")); Assertions.assertFalse(content.contains("User #1")); Assertions.assertFalse(content.contains("User #3")); Assertions.assertFalse(content.contains("User #4")); Assertions.assertFalse(content.contains("User #6")); Assertions.assertFalse(content.contains("User #7")); Assertions.assertFalse(content.contains("User #8")); Assertions.assertFalse(content.contains("User #9")); Assertions.assertFalse(content.contains("User #10")); } public class IterationBean { private int begin; private int end; private int step; private List<String> values; public IterationBean(int begin, int end, int step, List<String> values) { this.begin = begin; this.end = end; this.step = step; this.values = values; } public int getBegin() { return begin; } public int getEnd() { return end; } public int getStep() { return step; } public void setStep(int step) { this.step = step; } public List<String> getValues() { return values; } } }
apache/sedona
36,930
common/src/main/java/org/apache/sedona/common/S2Geography/WKTReader.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.sedona.common.S2Geography;

import com.google.common.geometry.*;
import java.io.IOException;
import java.io.Reader;
import java.io.StreamTokenizer;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.List;
import java.util.Locale;
import org.locationtech.jts.geom.*;
import org.locationtech.jts.geom.impl.CoordinateArraySequenceFactory;
import org.locationtech.jts.io.Ordinate;
import org.locationtech.jts.io.ParseException;
import org.locationtech.jts.io.WKTConstants;
import org.locationtech.jts.util.Assert;

/**
 * Reads {@link Geography} objects from their Well-Known Text representation.
 *
 * <p>The tokenizing and coordinate-parsing machinery closely mirrors the JTS {@code WKTReader};
 * the geometry-construction side produces S2-backed geography objects instead of JTS geometries.
 * Not thread-safe: parsing may swap {@code geometryFactory} mid-read (see the tagged-text reader).
 */
public class WKTReader {
  // Token strings returned by getNextWord() for the corresponding punctuation characters.
  private static final String COMMA = ",";
  private static final String L_PAREN = "(";
  private static final String R_PAREN = ")";
  private static final String NAN_SYMBOL = "NaN";

  private GeometryFactory geometryFactory;
  private CoordinateSequenceFactory csFactory;
  // Fallback factory that can hold full XYZM ordinates when the configured one cannot.
  private static CoordinateSequenceFactory csFactoryXYZM = CoordinateArraySequenceFactory.instance();
  private PrecisionModel precisionModel;

  /** Flag indicating that the old notation of coordinates in JTS is supported. */
  private static final boolean ALLOW_OLD_JTS_COORDINATE_SYNTAX = true;

  private boolean isAllowOldJtsCoordinateSyntax = ALLOW_OLD_JTS_COORDINATE_SYNTAX;

  /** Flag indicating that the old notation of MultiPoint coordinates in JTS is supported. */
  private static final boolean ALLOW_OLD_JTS_MULTIPOINT_SYNTAX = true;

  private boolean isAllowOldJtsMultipointSyntax = ALLOW_OLD_JTS_MULTIPOINT_SYNTAX;

  // When true, too-short coordinate sequences are padded and open rings closed
  // instead of letting geometry construction fail.
  private boolean isFixStructure = false;

  /** Creates a reader backed by a default {@link GeometryFactory}. */
  public WKTReader() {
    this.geometryFactory = new GeometryFactory();
    this.csFactory = geometryFactory.getCoordinateSequenceFactory();
    this.precisionModel = geometryFactory.getPrecisionModel();
  }

  /**
   * Creates a reader that takes its coordinate sequence factory and precision model from the given
   * {@link GeometryFactory}.
   *
   * @param geometryFactory the factory to use
   */
  public WKTReader(GeometryFactory geometryFactory) {
    this.geometryFactory = geometryFactory;
    this.csFactory = geometryFactory.getCoordinateSequenceFactory();
    this.precisionModel = geometryFactory.getPrecisionModel();
  }

  /**
   * Sets a flag indicating, that coordinates may have 3 ordinate values even though no Z or M
   * ordinate indicator is present. The default value is {@link #ALLOW_OLD_JTS_COORDINATE_SYNTAX}.
   *
   * @param value a boolean value
   */
  public void setIsOldJtsCoordinateSyntaxAllowed(boolean value) {
    isAllowOldJtsCoordinateSyntax = value;
  }

  /**
   * Sets a flag indicating, that point coordinates in a MultiPoint geometry must not be enclosed in
   * paren. The default value is {@link #ALLOW_OLD_JTS_MULTIPOINT_SYNTAX}
   *
   * @param value a boolean value
   */
  public void setIsOldJtsMultiPointSyntaxAllowed(boolean value) {
    isAllowOldJtsMultipointSyntax = value;
  }

  /**
   * Sets a flag indicating that the structure of input geometry should be fixed so that the
   * geometry can be constructed without error. This involves adding coordinates if the input
   * coordinate sequence is shorter than required.
   *
   * @param isFixStructure true if the input structure should be fixed
   * @see LinearRing#MINIMUM_VALID_SIZE
   */
  public void setFixStructure(boolean isFixStructure) {
    this.isFixStructure = isFixStructure;
  }

  /**
   * Reads a Well-Known Text representation of a {@link Geometry} from a {@link String}.
   *
   * @param wellKnownText one or more &lt;Geometry Tagged Text&gt; strings (see the OpenGIS Simple
   *     Features Specification) separated by whitespace
   * @return a <code>Geometry</code> specified by <code>wellKnownText</code>
   * @throws ParseException if a parsing problem occurs
   */
  public Geography read(String wellKnownText) throws ParseException {
    StringReader reader = new StringReader(wellKnownText);
    try {
      return read(reader);
    } finally {
      reader.close();
    }
  }

  /**
   * Reads a Well-Known Text representation of a {@link
   * org.apache.sedona.common.S2Geography.Geography} from a {@link Reader}.
   *
   * @param reader a Reader which will return a &lt;Geometry Tagged Text&gt; string (see the OpenGIS
   *     Simple Features Specification)
   * @return a <code>Geometry</code> read from <code>reader</code>
   * @throws ParseException if a parsing problem occurs
   */
  public Geography read(Reader reader) throws ParseException {
    StreamTokenizer tokenizer = createTokenizer(reader);
    try {
      return readGeometryTaggedText(tokenizer);
    } catch (IOException e) {
      // NOTE(review): the IOException cause is flattened to text here; the original
      // exception chain is lost (ParseException is created from e.toString() only).
      throw new ParseException(e.toString());
    }
  }

  /**
   * Utility function to create the tokenizer
   *
   * @param reader a reader
   * @return a WKT Tokenizer.
   */
  private static StreamTokenizer createTokenizer(Reader reader) {
    StreamTokenizer tokenizer = new StreamTokenizer(reader);
    // set tokenizer to NOT parse numbers: resetSyntax() clears all character classes,
    // then digits, sign, and '.' are declared word characters so that numbers arrive
    // as TT_WORD tokens and are parsed by getNextNumber() (exponents, NaN, precision).
    tokenizer.resetSyntax();
    tokenizer.wordChars('a', 'z');
    tokenizer.wordChars('A', 'Z');
    tokenizer.wordChars(128 + 32, 255);
    tokenizer.wordChars('0', '9');
    tokenizer.wordChars('-', '-');
    tokenizer.wordChars('+', '+');
    tokenizer.wordChars('.', '.');
    tokenizer.whitespaceChars(0, ' ');
    tokenizer.commentChar('#');
    return tokenizer;
  }

  /**
   * Reads a <code>Coordinate</code> from a stream using the given {@link StreamTokenizer}.
   *
   * <p>All ordinate values are read, but -depending on the {@link CoordinateSequenceFactory} of the
   * underlying {@link GeometryFactory}- not necessarily all can be handled. Those are silently
   * dropped.
   *
   * @param tokenizer the tokenizer to use
   * @param ordinateFlags a bit-mask defining the ordinates to read.
   * @param tryParen a value indicating if a starting {@link #L_PAREN} should be probed.
   * @return a {@link Coordinate} of appropriate dimension containing the read ordinate values
   * @throws IOException if an I/O error occurs
   * @throws ParseException if an unexpected token was encountered
   */
  private Coordinate getCoordinate(
      StreamTokenizer tokenizer, EnumSet<Ordinate> ordinateFlags, boolean tryParen)
      throws IOException, ParseException {
    boolean opened = false;
    if (tryParen && isOpenerNext(tokenizer)) {
      tokenizer.nextToken();
      opened = true;
    }

    // create a sequence for one coordinate
    // offsetM: the M ordinate lives one slot after Z when Z is present, otherwise in Z's slot.
    int offsetM = ordinateFlags.contains(Ordinate.Z) ? 1 : 0;
    Coordinate coord = createCoordinate(ordinateFlags);
    coord.setOrdinate(CoordinateSequence.X, precisionModel.makePrecise(getNextNumber(tokenizer)));
    coord.setOrdinate(CoordinateSequence.Y, precisionModel.makePrecise(getNextNumber(tokenizer)));

    // additionally read other vertices
    if (ordinateFlags.contains(Ordinate.Z))
      coord.setOrdinate(CoordinateSequence.Z, getNextNumber(tokenizer));
    if (ordinateFlags.contains(Ordinate.M))
      coord.setOrdinate(CoordinateSequence.Z + offsetM, getNextNumber(tokenizer));

    // Old JTS syntax: a bare third number after X Y (no Z/M tag) is treated as Z.
    if (ordinateFlags.size() == 2 && this.isAllowOldJtsCoordinateSyntax && isNumberNext(tokenizer)) {
      coord.setOrdinate(CoordinateSequence.Z, getNextNumber(tokenizer));
    }

    // read close token if it was opened here
    if (opened) {
      getNextCloser(tokenizer);
    }

    return coord;
  }

  // Picks the narrowest Coordinate subclass that can hold the requested ordinates;
  // plain Coordinate (XYZ-capable) is used when the old JTS 3-ordinate syntax is allowed.
  private Coordinate createCoordinate(EnumSet<Ordinate> ordinateFlags) {
    boolean hasZ = ordinateFlags.contains(Ordinate.Z);
    boolean hasM = ordinateFlags.contains(Ordinate.M);
    if (hasZ && hasM) return new CoordinateXYZM();
    if (hasM) return new CoordinateXYM();
    if (hasZ || this.isAllowOldJtsCoordinateSyntax) return new Coordinate();
    return new CoordinateXY();
  }

  /**
   * Reads a <code>Coordinate</code> from a stream using the given {@link StreamTokenizer}.
   *
   * <p>All ordinate values are read, but -depending on the {@link CoordinateSequenceFactory} of the
   * underlying {@link GeometryFactory}- not necessarily all can be handled. Those are silently
   * dropped.
   *
   * <p>
   *
   * @param tokenizer the tokenizer to use
   * @param ordinateFlags a bit-mask defining the ordinates to read.
   * @return a {@link CoordinateSequence} of length 1 containing the read ordinate values
   * @throws IOException if an I/O error occurs
   * @throws ParseException if an unexpected token was encountered
   */
  private CoordinateSequence getCoordinateSequence(
      StreamTokenizer tokenizer, EnumSet<Ordinate> ordinateFlags, int minSize, boolean isRing)
      throws IOException, ParseException {
    if (getNextEmptyOrOpener(tokenizer).equals(WKTConstants.EMPTY))
      return createCoordinateSequenceEmpty(ordinateFlags);

    List<Coordinate> coordinates = new ArrayList<Coordinate>();
    do {
      coordinates.add(getCoordinate(tokenizer, ordinateFlags, false));
    } while (getNextCloserOrComma(tokenizer).equals(COMMA));
    if (isFixStructure) {
      fixStructure(coordinates, minSize, isRing);
    }
    Coordinate[] coordArray = coordinates.toArray(new Coordinate[0]);
    return csFactory.create(coordArray);
  }

  // Repairs a structurally invalid sequence in place: closes an open ring by repeating
  // the first coordinate, then pads with copies of the last coordinate up to minSize.
  private static void fixStructure(List<Coordinate> coords, int minSize, boolean isRing) {
    if (coords.size() == 0) return;
    if (isRing && !isClosed(coords)) {
      coords.add(coords.get(0).copy());
    }
    while (coords.size() < minSize) {
      coords.add(coords.get(coords.size() - 1).copy());
    }
  }

  // A sequence is closed when first and last coordinates coincide in 2D;
  // an empty sequence counts as closed, a single point does not.
  private static boolean isClosed(List<Coordinate> coords) {
    if (coords.size() == 0) return true;
    if (coords.size() == 1 || !coords.get(0).equals2D(coords.get(coords.size() - 1))) {
      return false;
    }
    return true;
  }

  // Builds a zero-length sequence with the dimension/measure count implied by the flags.
  private CoordinateSequence createCoordinateSequenceEmpty(EnumSet<Ordinate> ordinateFlags)
      throws IOException, ParseException {
    return csFactory.create(
        0, toDimension(ordinateFlags), ordinateFlags.contains(Ordinate.M) ? 1 : 0);
  }

  /**
   * Reads a <code>CoordinateSequence</code> from a stream using the given {@link StreamTokenizer}
   * for an old-style JTS MultiPoint (Point coordinates not enclosed in parentheses).
   *
   * <p>All ordinate values are read, but -depending on the {@link CoordinateSequenceFactory} of the
   * underlying {@link GeometryFactory}- not necessarily all can be handled. Those are silently
   * dropped.
   *
   * @param tokenizer the tokenizer to use
   * @param ordinateFlags a bit-mask defining the ordinates to read.
   * @return a {@link CoordinateSequence} of length 1 containing the read ordinate values
   * @throws IOException if an I/O error occurs
   * @throws ParseException if an unexpected token was encountered
   */
  private CoordinateSequence getCoordinateSequenceOldMultiPoint(
      StreamTokenizer tokenizer, EnumSet<Ordinate> ordinateFlags)
      throws IOException, ParseException {
    List<Coordinate> coordinates = new ArrayList<Coordinate>();
    do {
      // tryParen = true: each point MAY be parenthesized even in the old syntax.
      coordinates.add(getCoordinate(tokenizer, ordinateFlags, true));
    } while (getNextCloserOrComma(tokenizer).equals(COMMA));

    Coordinate[] coordArray = coordinates.toArray(new Coordinate[0]);
    return csFactory.create(coordArray);
  }

  /**
   * Computes the required dimension based on the given ordinate values. It is assumed that {@link
   * Ordinate#X} and {@link Ordinate#Y} are included.
   *
   * @param ordinateFlags the ordinate bit-mask
   * @return the number of dimensions required to store ordinates for the given bit-mask.
   */
  private int toDimension(EnumSet<Ordinate> ordinateFlags) {
    int dimension = 2;
    if (ordinateFlags.contains(Ordinate.Z)) dimension++;
    if (ordinateFlags.contains(Ordinate.M)) dimension++;

    // Reserve a slot for a possible untagged third ordinate (old JTS syntax).
    if (dimension == 2 && this.isAllowOldJtsCoordinateSyntax) dimension++;

    return dimension;
  }

  /**
   * Tests if the next token in the stream is a number
   *
   * @param tokenizer the tokenizer
   * @return {@code true} if the next token is a number, otherwise {@code false}
   * @throws IOException if an I/O error occurs
   */
  private static boolean isNumberNext(StreamTokenizer tokenizer) throws IOException {
    // Numbers tokenize as TT_WORD because createTokenizer() declared digits word chars.
    int type = tokenizer.nextToken();
    tokenizer.pushBack();
    return type == StreamTokenizer.TT_WORD;
  }

  /**
   * Tests if the next token in the stream is a left opener ({@link #L_PAREN})
   *
   * @param tokenizer the tokenizer
   * @return {@code true} if the next token is a {@link #L_PAREN}, otherwise {@code false}
   * @throws IOException if an I/O error occurs
   */
  private static boolean isOpenerNext(StreamTokenizer tokenizer) throws IOException {
    int type = tokenizer.nextToken();
    tokenizer.pushBack();
    return type == '(';
  }

  /**
   * Parses the next number in the stream. Numbers with exponents are handled. <tt>NaN</tt> values
   * are handled correctly, and the case of the "NaN" symbol is not significant.
* * @param tokenizer tokenizer over a stream of text in Well-known Text * @return the next number in the stream * @throws ParseException if the next token is not a valid number * @throws IOException if an I/O error occurs */ private double getNextNumber(StreamTokenizer tokenizer) throws IOException, ParseException { int type = tokenizer.nextToken(); switch (type) { case StreamTokenizer.TT_WORD: { if (tokenizer.sval.equalsIgnoreCase(NAN_SYMBOL)) { return Double.NaN; } else { try { return Double.parseDouble(tokenizer.sval); } catch (NumberFormatException ex) { throw parseErrorWithLine(tokenizer, "Invalid number: " + tokenizer.sval); } } } } throw parseErrorExpected(tokenizer, "number"); } /** * Returns the next EMPTY or L_PAREN in the stream as uppercase text. * * @return the next EMPTY or L_PAREN in the stream as uppercase text. * @throws ParseException if the next token is not EMPTY or L_PAREN * @throws IOException if an I/O error occurs * @param tokenizer tokenizer over a stream of text in Well-known Text */ private static String getNextEmptyOrOpener(StreamTokenizer tokenizer) throws IOException, ParseException { String nextWord = getNextWord(tokenizer); if (nextWord.equalsIgnoreCase(WKTConstants.Z)) { // z = true; nextWord = getNextWord(tokenizer); } else if (nextWord.equalsIgnoreCase(WKTConstants.M)) { // m = true; nextWord = getNextWord(tokenizer); } else if (nextWord.equalsIgnoreCase(WKTConstants.ZM)) { // z = true; // m = true; nextWord = getNextWord(tokenizer); } if (nextWord.equals(WKTConstants.EMPTY) || nextWord.equals(L_PAREN)) { return nextWord; } throw parseErrorExpected(tokenizer, WKTConstants.EMPTY + " or " + L_PAREN); } /** * Returns the next ordinate flag information in the stream as uppercase text. This can be Z, M or * ZM. * * @return the next EMPTY or L_PAREN in the stream as uppercase text. 
* @throws ParseException if the next token is not EMPTY or L_PAREN * @throws IOException if an I/O error occurs * @param tokenizer tokenizer over a stream of text in Well-known Text */ private static EnumSet<Ordinate> getNextOrdinateFlags(StreamTokenizer tokenizer) throws IOException, ParseException { EnumSet<Ordinate> result = EnumSet.of(Ordinate.X, Ordinate.Y); String nextWord = lookAheadWord(tokenizer).toUpperCase(Locale.ROOT); if (nextWord.equalsIgnoreCase(WKTConstants.Z)) { tokenizer.nextToken(); result.add(Ordinate.Z); } else if (nextWord.equalsIgnoreCase(WKTConstants.M)) { tokenizer.nextToken(); result.add(Ordinate.M); } else if (nextWord.equalsIgnoreCase(WKTConstants.ZM)) { tokenizer.nextToken(); result.add(Ordinate.Z); result.add(Ordinate.M); } return result; } /** * Returns the next word in the stream. * * @param tokenizer tokenizer over a stream of text in Well-known Text format. The next token must * be a word. * @return the next word in the stream as uppercase text * @throws ParseException if the next token is not a word * @throws IOException if an I/O error occurs */ private static String lookAheadWord(StreamTokenizer tokenizer) throws IOException, ParseException { String nextWord = getNextWord(tokenizer); tokenizer.pushBack(); return nextWord; } /** * Returns the next {@link #R_PAREN} or {@link #COMMA} in the stream. * * @return the next R_PAREN or COMMA in the stream * @throws ParseException if the next token is not R_PAREN or COMMA * @throws IOException if an I/O error occurs * @param tokenizer tokenizer over a stream of text in Well-known Text */ private static String getNextCloserOrComma(StreamTokenizer tokenizer) throws IOException, ParseException { String nextWord = getNextWord(tokenizer); if (nextWord.equals(COMMA) || nextWord.equals(R_PAREN)) { return nextWord; } throw parseErrorExpected(tokenizer, COMMA + " or " + R_PAREN); } /** * Returns the next {@link #R_PAREN} in the stream. 
* * @param tokenizer tokenizer over a stream of text in Well-known Text format. The next token must * be R_PAREN. * @return the next R_PAREN in the stream * @throws ParseException if the next token is not R_PAREN * @throws IOException if an I/O error occurs */ private String getNextCloser(StreamTokenizer tokenizer) throws IOException, ParseException { String nextWord = getNextWord(tokenizer); if (nextWord.equals(R_PAREN)) { return nextWord; } throw parseErrorExpected(tokenizer, R_PAREN); } /** * Returns the next word in the stream. * * @return the next word in the stream as uppercase text * @throws ParseException if the next token is not a word * @throws IOException if an I/O error occurs * @param tokenizer tokenizer over a stream of text in Well-known Text */ private static String getNextWord(StreamTokenizer tokenizer) throws IOException, ParseException { int type = tokenizer.nextToken(); switch (type) { case StreamTokenizer.TT_WORD: String word = tokenizer.sval; if (word.equalsIgnoreCase(WKTConstants.EMPTY)) return WKTConstants.EMPTY; return word; case '(': return L_PAREN; case ')': return R_PAREN; case ',': return COMMA; } throw parseErrorExpected(tokenizer, "word"); } /** * Creates a formatted ParseException reporting that the current token was unexpected. * * @param expected a description of what was expected */ private static ParseException parseErrorExpected(StreamTokenizer tokenizer, String expected) { // throws Asserts for tokens that should never be seen if (tokenizer.ttype == StreamTokenizer.TT_NUMBER) Assert.shouldNeverReachHere("Unexpected NUMBER token"); if (tokenizer.ttype == StreamTokenizer.TT_EOL) Assert.shouldNeverReachHere("Unexpected EOL token"); String tokenStr = tokenString(tokenizer); return parseErrorWithLine(tokenizer, "Expected " + expected + " but found " + tokenStr); } /** * Creates a formatted ParseException reporting that the current token was unexpected. 
 *
 * @param msg a description of what was expected
 */
private static ParseException parseErrorWithLine(StreamTokenizer tokenizer, String msg) {
  // Append the tokenizer's current line number so callers can locate the bad input.
  return new ParseException(msg + " (line " + tokenizer.lineno() + ")");
}

/**
 * Gets a human-readable description of the current token type, for error messages.
 *
 * @param tokenizer the tokenizer
 * @return a description of the current token
 */
private static String tokenString(StreamTokenizer tokenizer) {
  switch (tokenizer.ttype) {
    case StreamTokenizer.TT_NUMBER:
      return "<NUMBER>";
    case StreamTokenizer.TT_EOL:
      return "End-of-Line";
    case StreamTokenizer.TT_EOF:
      return "End-of-Stream";
    case StreamTokenizer.TT_WORD:
      return "'" + tokenizer.sval + "'";
  }
  // Any other ttype is the single character itself (e.g. '(' or ',').
  return "'" + (char) tokenizer.ttype + "'";
}

/**
 * Creates a <code>Geometry</code> using the next token in the stream.
 *
 * <p>Reads the geometry tag (e.g. POINT, POLYGON ZM), derives the ordinate flags from any
 * Z/M/ZM suffix fused onto the tag, and delegates to the typed reader.
 *
 * @return a <code>Geometry</code> specified by the next token in the stream
 * @throws ParseException if the coordinates used to create a <code>Polygon</code> shell and holes
 *     do not form closed linestrings, or if an unexpected token was encountered
 * @throws IOException if an I/O error occurs
 * @param tokenizer tokenizer over a stream of text in Well-known Text
 */
private org.apache.sedona.common.S2Geography.Geography readGeometryTaggedText(
    StreamTokenizer tokenizer) throws IOException, ParseException {
  String type;

  EnumSet<Ordinate> ordinateFlags = EnumSet.of(Ordinate.X, Ordinate.Y);
  // Locale.ROOT keeps the upper-casing locale-independent (e.g. Turkish dotless-i).
  type = getNextWord(tokenizer).toUpperCase(Locale.ROOT);
  if (type.endsWith(WKTConstants.ZM)) {
    ordinateFlags.add(Ordinate.Z);
    ordinateFlags.add(Ordinate.M);
  } else if (type.endsWith(WKTConstants.Z)) {
    ordinateFlags.add(Ordinate.Z);
  } else if (type.endsWith(WKTConstants.M)) {
    ordinateFlags.add(Ordinate.M);
  }
  return readGeometryTaggedText(tokenizer, type, ordinateFlags);
}

/**
 * Dispatches to the reader for the given geometry tag.
 *
 * @param tokenizer tokenizer over a stream of text in Well-known Text
 * @param type the upper-cased geometry tag, possibly with a Z/M/ZM suffix
 * @param ordinateFlags the ordinates detected so far (at least X and Y)
 * @throws ParseException if the tag is unknown or carries invalid dimension modifiers
 * @throws IOException if an I/O error occurs
 */
private org.apache.sedona.common.S2Geography.Geography readGeometryTaggedText(
    StreamTokenizer tokenizer, String type, EnumSet<Ordinate> ordinateFlags)
    throws IOException, ParseException {
  if (ordinateFlags.size() == 2) {
    // No suffix was fused onto the tag; a free-standing Z/M/ZM token may still follow.
    ordinateFlags = getNextOrdinateFlags(tokenizer);
  }

  // Probe whether the configured CoordinateSequenceFactory can represent the required
  // dimension; if not, fall back to a factory that can (csFactoryXYZM). The try/catch is
  // used because CoordinateSequenceFactory exposes no way to query its supported min/max
  // dimension or ordinate capabilities up front.
  try {
    csFactory.create(0, toDimension(ordinateFlags), ordinateFlags.contains(Ordinate.M) ? 1 : 0);
  } catch (Exception e) {
    // NOTE(review): mutates this reader's geometryFactory as a side effect — subsequent
    // reads on this instance will keep using csFactoryXYZM.
    geometryFactory =
        new GeometryFactory(
            geometryFactory.getPrecisionModel(), geometryFactory.getSRID(), csFactoryXYZM);
  }

  if (isTypeName(tokenizer, type, WKTConstants.POINT)) {
    return readPointText(tokenizer, ordinateFlags);
  } else if (isTypeName(tokenizer, type, WKTConstants.LINESTRING)) {
    return readPolylineText(tokenizer, ordinateFlags);
  } else if (isTypeName(tokenizer, type, WKTConstants.LINEARRING)) {
    // NOTE(review): LINEARRING is routed through the Polygon reader, which expects the
    // double-parenthesized <Polygon Text> grammar rather than a bare ring — confirm that
    // plain "LINEARRING (x y, ...)" input is actually supported.
    return readPolygonText(tokenizer, ordinateFlags);
  } else if (isTypeName(tokenizer, type, WKTConstants.POLYGON)) {
    return readPolygonText(tokenizer, ordinateFlags);
  } else if (isTypeName(tokenizer, type, WKTConstants.MULTIPOINT)) {
    return readMultiPointText(tokenizer, ordinateFlags);
  } else if (isTypeName(tokenizer, type, WKTConstants.MULTILINESTRING)) {
    return readMultiPolylineText(tokenizer, ordinateFlags);
  } else if (isTypeName(tokenizer, type, WKTConstants.MULTIPOLYGON)) {
    return readMultiPolygonText(tokenizer, ordinateFlags);
  } else if (isTypeName(tokenizer, type, WKTConstants.GEOMETRYCOLLECTION)) {
    return readGeographyCollectionText(tokenizer, ordinateFlags);
  }
  throw parseErrorWithLine(tokenizer, "Unknown geography type: " + type);
}

/**
 * Tests whether {@code type} is {@code typeName} optionally followed by a valid Z/M/ZM
 * dimension modifier.
 *
 * @param tokenizer used only for error reporting (line number)
 * @param type the tag read from the stream
 * @param typeName the candidate base geometry name
 * @return true if the tag matches the base name; false if it does not start with it
 * @throws ParseException if the tag starts with the base name but carries an invalid modifier
 */
private boolean isTypeName(StreamTokenizer tokenizer, String type, String typeName)
    throws ParseException {
  if (!type.startsWith(typeName)) return false;

  String modifiers = type.substring(typeName.length());
  boolean isValidMod =
      modifiers.length() <= 2
          && (modifiers.length() == 0
              || modifiers.equals(WKTConstants.Z)
              || modifiers.equals(WKTConstants.M)
              || modifiers.equals(WKTConstants.ZM));
  if (!isValidMod) {
    throw parseErrorWithLine(tokenizer, "Invalid dimension modifiers: " + type);
  }

  return true;
}

/**
 * Creates a <code>Point</code> using the next token in the stream.
 *
 * @param tokenizer tokenizer over a stream of text in Well-known Text format. The next tokens
 *     must form a &lt;Point Text&gt;.
 * @return a <code>Point</code> specified by the next token in the stream
 * @throws IOException if an I/O error occurs
 * @throws ParseException if an unexpected token was encountered
 */
private SinglePointGeography readPointText(
    StreamTokenizer tokenizer, EnumSet<Ordinate> ordinateFlags)
    throws IOException, ParseException {
  CoordinateSequence pts = getCoordinateSequence(tokenizer, ordinateFlags, 1, false);

  // If X and Y are NaN create a empty point
  if (pts.size() <= 0 || Double.isNaN(pts.getX(0)) || Double.isNaN(pts.getY(0))) {
    return new SinglePointGeography();
  }

  // WKT stores (lon, lat); S2LatLng.fromDegrees takes latitude first.
  double lon = pts.getX(0);
  double lat = pts.getY(0);
  S2Point s2Point = S2LatLng.fromDegrees(lat, lon).toPoint();

  // Build via S2Builder + S2PointVectorLayer
  S2Builder builder = new S2Builder.Builder().build();
  S2PointVectorLayer layer = new S2PointVectorLayer();
  builder.startLayer(layer);
  builder.addPoint(s2Point);

  // must call build() before reading out the points
  S2Error error = new S2Error();
  if (!builder.build(error)) {
    throw new IOException("Failed to build S2 point layer: " + error.text());
  }

  // Extract the resulting points
  List<S2Point> points = layer.getPointVector();
  if (points.isEmpty()) {
    return new SinglePointGeography();
  }
  return new SinglePointGeography(points.get(0));
}

/**
 * Creates a <code>LineString</code> using the next token in the stream.
 *
 * @param tokenizer tokenizer over a stream of text in Well-known Text format. The next tokens
 *     must form a &lt;LineString Text&gt;.
* @return a <code>LineString</code> specified by the next token in the stream * @throws IOException if an I/O error occurs * @throws ParseException if an unexpected token was encountered */ private SinglePolylineGeography readPolylineText( StreamTokenizer tokenizer, EnumSet<Ordinate> ordinateFlags) throws IOException, ParseException { CoordinateSequence seq = getCoordinateSequence(tokenizer, ordinateFlags, LineString.MINIMUM_VALID_SIZE, false); if (seq.size() < 2) { // empty or extended-but-all-NaN → empty geography return new SinglePolylineGeography(); } List<S2Point> pts = new ArrayList<>(seq.size()); for (int i = 0; i < seq.size(); i++) { double lon = seq.getX(i); double lat = seq.getY(i); pts.add(S2LatLng.fromDegrees(lat, lon).toPoint()); } S2Builder builder = new S2Builder.Builder().build(); S2PolylineLayer layer = new S2PolylineLayer(); builder.startLayer(layer); builder.addPolyline(new S2Polyline(pts)); S2Error error = new S2Error(); if (!builder.build(error)) { throw new IOException("Failed to build S2 polyline: " + error.text()); } S2Polyline s2poly = layer.getPolyline(); return new SinglePolylineGeography(s2poly); } /** * Creates a <code>LinearRing</code> using the next token in the stream. * * @param tokenizer tokenizer over a stream of text in Well-known Text format. The next tokens * must form a &lt;LineString Text&gt;. 
* @return a <code>LinearRing</code> specified by the next token in the stream * @throws IOException if an I/O error occurs * @throws ParseException if the coordinates used to create the <code>LinearRing</code> do not * form a closed linestring, or if an unexpected token was encountered */ private S2Loop readLoopText(StreamTokenizer tokenizer, EnumSet<Ordinate> ordinateFlags) throws IOException, ParseException { CoordinateSequence seq = getCoordinateSequence(tokenizer, ordinateFlags, LinearRing.MINIMUM_VALID_SIZE, true); // build the loop List<S2Point> pts = new ArrayList<>(seq.size()); for (int i = 0; i < seq.size(); i++) { pts.add(S2LatLng.fromDegrees(seq.getY(i), seq.getX(i)).toPoint()); } return new S2Loop(pts); } /** * Creates a <code>MultiPoint</code> using the next tokens in the stream. * * @param tokenizer tokenizer over a stream of text in Well-known Text format. The next tokens * must form a &lt;MultiPoint Text&gt;. * @return a <code>MultiPoint</code> specified by the next token in the stream * @throws IOException if an I/O error occurs * @throws ParseException if an unexpected token was encountered */ private PointGeography readMultiPointText( StreamTokenizer tokenizer, EnumSet<Ordinate> ordinateFlags) throws IOException, ParseException { String nextToken = getNextEmptyOrOpener(tokenizer); if (nextToken.equals(WKTConstants.EMPTY)) { return new PointGeography(); } // check for old-style JTS syntax (no parentheses surrounding Point coordinates) and parse it if // present // MD 2009-02-21 - this is only provided for backwards compatibility for a few versions if (isAllowOldJtsMultipointSyntax) { String nextWord = lookAheadWord(tokenizer); if (nextWord != L_PAREN && nextWord != WKTConstants.EMPTY) { CoordinateSequence pts = getCoordinateSequenceOldMultiPoint(tokenizer, ordinateFlags); if (Double.isNaN(pts.getX(0)) || Double.isNaN(pts.getY(0))) { return new PointGeography(); } List<S2Point> points = new ArrayList<>(pts.size()); // Build via S2Builder + 
S2PointVectorLayer S2Builder builder = new S2Builder.Builder().build(); S2PointVectorLayer layer = new S2PointVectorLayer(); // must call build() before reading out the points S2Error error = new S2Error(); for (int i = 0; i < pts.size(); i++) { double lon = pts.getX(i); double lat = pts.getY(i); S2Point s2Point = S2LatLng.fromDegrees(lat, lon).toPoint(); builder.startLayer(layer); builder.addPoint(s2Point); if (!builder.build(error)) { throw new IOException("Failed to build S2 point layer: " + error.text()); } } for (int i = 0; i < layer.getPointVector().size(); i++) { // Extract the resulting points points.add(layer.getPointVector().get(i)); } return new PointGeography(points); } } List<S2Point> points = new ArrayList<S2Point>(); PointGeography point = readPointText(tokenizer, ordinateFlags); points.addAll(point.getPoints()); nextToken = getNextCloserOrComma(tokenizer); while (nextToken.equals(COMMA)) { point = readPointText(tokenizer, ordinateFlags); points.addAll(point.getPoints()); nextToken = getNextCloserOrComma(tokenizer); } return new PointGeography(points); } /** * Creates a <code>Polygon</code> using the next token in the stream. * * @param tokenizer tokenizer over a stream of text in Well-known Text format. The next tokens * must form a &lt;Polygon Text&gt;. * @return a <code>Polygon</code> specified by the next token in the stream * @throws ParseException if the coordinates used to create the <code>Polygon</code> shell and * holes do not form closed linestrings, or if an unexpected token was encountered. 
* @throws IOException if an I/O error occurs */ private PolygonGeography readPolygonText( StreamTokenizer tokenizer, EnumSet<Ordinate> ordinateFlags) throws IOException, ParseException { String nextToken = getNextEmptyOrOpener(tokenizer); if (nextToken.equals(WKTConstants.EMPTY)) { return new PolygonGeography(); } List<S2Loop> holes = new ArrayList<S2Loop>(); S2Loop shell = readLoopText(tokenizer, ordinateFlags); holes.add(shell); nextToken = getNextCloserOrComma(tokenizer); while (nextToken.equals(COMMA)) { S2Loop hole = readLoopText(tokenizer, ordinateFlags); holes.add(hole); nextToken = getNextCloserOrComma(tokenizer); } // Now feed those loops into S2Builder + S2PolygonLayer: S2Builder builder = new S2Builder.Builder().build(); S2PolygonLayer polyLayer = new S2PolygonLayer(); builder.startLayer(polyLayer); // add shell + holes for (S2Loop loop : holes) { builder.addLoop(loop); } // build S2Error error = new S2Error(); if (!builder.build(error)) { throw new IOException("S2Builder failed: " + error.text()); } // extract the stitched polygon S2Polygon s2poly = polyLayer.getPolygon(); // wrap in your PolygonGeography return new PolygonGeography(s2poly); } /** * Creates a <code>MultiLineString</code> using the next token in the stream. * * @param tokenizer tokenizer over a stream of text in Well-known Text format. The next tokens * must form a &lt;MultiLineString Text&gt;. 
* @return a <code>MultiLineString</code> specified by the next token in the stream * @throws IOException if an I/O error occurs * @throws ParseException if an unexpected token was encountered */ private PolylineGeography readMultiPolylineText( StreamTokenizer tokenizer, EnumSet<Ordinate> ordinateFlags) throws IOException, ParseException { String nextToken = getNextEmptyOrOpener(tokenizer); if (nextToken.equals(WKTConstants.EMPTY)) { return new PolylineGeography(); } List<S2Polyline> lineStrings = new ArrayList<S2Polyline>(); do { PolylineGeography lineString = readPolylineText(tokenizer, ordinateFlags); lineStrings.addAll(lineString.getPolylines()); nextToken = getNextCloserOrComma(tokenizer); } while (nextToken.equals(COMMA)); return new PolylineGeography(lineStrings); } /** * Creates a <code>MultiPolygon</code> using the next token in the stream. * * @param tokenizer tokenizer over a stream of text in Well-known Text format. The next tokens * must form a &lt;MultiPolygon Text&gt;. * @return a <code>MultiPolygon</code> specified by the next token in the stream, or if if the * coordinates used to create the <code>Polygon</code> shells and holes do not form closed * linestrings. 
* @throws IOException if an I/O error occurs * @throws ParseException if an unexpected token was encountered */ private MultiPolygonGeography readMultiPolygonText( StreamTokenizer tokenizer, EnumSet<Ordinate> ordinateFlags) throws IOException, ParseException { String nextToken = getNextEmptyOrOpener(tokenizer); if (nextToken.equals(WKTConstants.EMPTY)) { return new MultiPolygonGeography(Geography.GeographyKind.MULTIPOLYGON, new ArrayList<>()); } List<S2Polygon> polygons = new ArrayList<S2Polygon>(); do { PolygonGeography polygon = readPolygonText(tokenizer, ordinateFlags); polygons.add(polygon.polygon); nextToken = getNextCloserOrComma(tokenizer); } while (nextToken.equals(COMMA)); return new MultiPolygonGeography(Geography.GeographyKind.MULTIPOLYGON, polygons); } /** * Creates a <code>GeometryCollection</code> using the next token in the stream. * * @param tokenizer tokenizer over a stream of text in Well-known Text format. The next tokens * must form a &lt;GeometryCollection Text&gt;. * @return a <code>GeometryCollection</code> specified by the next token in the stream * @throws ParseException if the coordinates used to create a <code>Polygon</code> shell and holes * do not form closed linestrings, or if an unexpected token was encountered * @throws IOException if an I/O error occurs */ private GeographyCollection readGeographyCollectionText( StreamTokenizer tokenizer, EnumSet<Ordinate> ordinateFlags) throws IOException, ParseException { String nextToken = getNextEmptyOrOpener(tokenizer); if (nextToken.equals(WKTConstants.EMPTY)) { return new GeographyCollection(); } List<org.apache.sedona.common.S2Geography.Geography> geometries = new ArrayList<org.apache.sedona.common.S2Geography.Geography>(); do { org.apache.sedona.common.S2Geography.Geography geometry = readGeometryTaggedText(tokenizer); geometries.add(geometry); nextToken = getNextCloserOrComma(tokenizer); } while (nextToken.equals(COMMA)); return new GeographyCollection(geometries); } }
googleapis/google-cloud-java
36,716
java-orchestration-airflow/proto-google-cloud-orchestration-airflow-v1beta1/src/main/java/com/google/cloud/orchestration/airflow/service/v1beta1/CreateUserWorkloadsConfigMapRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/orchestration/airflow/service/v1beta1/environments.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.orchestration.airflow.service.v1beta1; /** * * * <pre> * Create user workloads ConfigMap request. * </pre> * * Protobuf type {@code * google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsConfigMapRequest} */ public final class CreateUserWorkloadsConfigMapRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsConfigMapRequest) CreateUserWorkloadsConfigMapRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateUserWorkloadsConfigMapRequest.newBuilder() to construct. 
private CreateUserWorkloadsConfigMapRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateUserWorkloadsConfigMapRequest() { parent_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CreateUserWorkloadsConfigMapRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.orchestration.airflow.service.v1beta1.EnvironmentsOuterClass .internal_static_google_cloud_orchestration_airflow_service_v1beta1_CreateUserWorkloadsConfigMapRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.orchestration.airflow.service.v1beta1.EnvironmentsOuterClass .internal_static_google_cloud_orchestration_airflow_service_v1beta1_CreateUserWorkloadsConfigMapRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsConfigMapRequest.class, com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsConfigMapRequest.Builder.class); } private int bitField0_; public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The environment name to create a ConfigMap for, in the form: * "projects/{projectId}/locations/{locationId}/environments/{environmentId}" * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. 
*/ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The environment name to create a ConfigMap for, in the form: * "projects/{projectId}/locations/{locationId}/environments/{environmentId}" * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int USER_WORKLOADS_CONFIG_MAP_FIELD_NUMBER = 2; private com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsConfigMap userWorkloadsConfigMap_; /** * * * <pre> * Required. User workloads ConfigMap to create. * </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsConfigMap user_workloads_config_map = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the userWorkloadsConfigMap field is set. */ @java.lang.Override public boolean hasUserWorkloadsConfigMap() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. User workloads ConfigMap to create. * </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsConfigMap user_workloads_config_map = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The userWorkloadsConfigMap. 
*/ @java.lang.Override public com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsConfigMap getUserWorkloadsConfigMap() { return userWorkloadsConfigMap_ == null ? com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsConfigMap .getDefaultInstance() : userWorkloadsConfigMap_; } /** * * * <pre> * Required. User workloads ConfigMap to create. * </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsConfigMap user_workloads_config_map = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsConfigMapOrBuilder getUserWorkloadsConfigMapOrBuilder() { return userWorkloadsConfigMap_ == null ? com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsConfigMap .getDefaultInstance() : userWorkloadsConfigMap_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getUserWorkloadsConfigMap()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUserWorkloadsConfigMap()); } size += getUnknownFields().getSerializedSize(); memoizedSize 
= size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsConfigMapRequest)) { return super.equals(obj); } com.google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsConfigMapRequest other = (com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsConfigMapRequest) obj; if (!getParent().equals(other.getParent())) return false; if (hasUserWorkloadsConfigMap() != other.hasUserWorkloadsConfigMap()) return false; if (hasUserWorkloadsConfigMap()) { if (!getUserWorkloadsConfigMap().equals(other.getUserWorkloadsConfigMap())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); if (hasUserWorkloadsConfigMap()) { hash = (37 * hash) + USER_WORKLOADS_CONFIG_MAP_FIELD_NUMBER; hash = (53 * hash) + getUserWorkloadsConfigMap().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsConfigMapRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsConfigMapRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsConfigMapRequest 
parseFrom(com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsConfigMapRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsConfigMapRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsConfigMapRequest parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsConfigMapRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsConfigMapRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsConfigMapRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsConfigMapRequest parseDelimitedFrom( 
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsConfigMapRequest parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsConfigMapRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsConfigMapRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Create user workloads ConfigMap request. 
* </pre> * * Protobuf type {@code * google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsConfigMapRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsConfigMapRequest) com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsConfigMapRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.orchestration.airflow.service.v1beta1.EnvironmentsOuterClass .internal_static_google_cloud_orchestration_airflow_service_v1beta1_CreateUserWorkloadsConfigMapRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.orchestration.airflow.service.v1beta1.EnvironmentsOuterClass .internal_static_google_cloud_orchestration_airflow_service_v1beta1_CreateUserWorkloadsConfigMapRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsConfigMapRequest.class, com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsConfigMapRequest.Builder.class); } // Construct using // com.google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsConfigMapRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getUserWorkloadsConfigMapFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; userWorkloadsConfigMap_ = null; if (userWorkloadsConfigMapBuilder_ != null) { 
userWorkloadsConfigMapBuilder_.dispose(); userWorkloadsConfigMapBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.orchestration.airflow.service.v1beta1.EnvironmentsOuterClass .internal_static_google_cloud_orchestration_airflow_service_v1beta1_CreateUserWorkloadsConfigMapRequest_descriptor; } @java.lang.Override public com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsConfigMapRequest getDefaultInstanceForType() { return com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsConfigMapRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsConfigMapRequest build() { com.google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsConfigMapRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsConfigMapRequest buildPartial() { com.google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsConfigMapRequest result = new com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsConfigMapRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsConfigMapRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.userWorkloadsConfigMap_ = userWorkloadsConfigMapBuilder_ == null ? 
userWorkloadsConfigMap_ : userWorkloadsConfigMapBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsConfigMapRequest) { return mergeFrom( (com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsConfigMapRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsConfigMapRequest other) { if (other == com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsConfigMapRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasUserWorkloadsConfigMap()) { mergeUserWorkloadsConfigMap(other.getUserWorkloadsConfigMap()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean 
isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage( getUserWorkloadsConfigMapFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The environment name to create a ConfigMap for, in the form: * "projects/{projectId}/locations/{locationId}/environments/{environmentId}" * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The environment name to create a ConfigMap for, in the form: * "projects/{projectId}/locations/{locationId}/environments/{environmentId}" * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. 
*/ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The environment name to create a ConfigMap for, in the form: * "projects/{projectId}/locations/{locationId}/environments/{environmentId}" * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The environment name to create a ConfigMap for, in the form: * "projects/{projectId}/locations/{locationId}/environments/{environmentId}" * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The environment name to create a ConfigMap for, in the form: * "projects/{projectId}/locations/{locationId}/environments/{environmentId}" * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. 
*/ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsConfigMap userWorkloadsConfigMap_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsConfigMap, com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsConfigMap.Builder, com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsConfigMapOrBuilder> userWorkloadsConfigMapBuilder_; /** * * * <pre> * Required. User workloads ConfigMap to create. * </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsConfigMap user_workloads_config_map = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the userWorkloadsConfigMap field is set. */ public boolean hasUserWorkloadsConfigMap() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. User workloads ConfigMap to create. * </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsConfigMap user_workloads_config_map = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The userWorkloadsConfigMap. */ public com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsConfigMap getUserWorkloadsConfigMap() { if (userWorkloadsConfigMapBuilder_ == null) { return userWorkloadsConfigMap_ == null ? com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsConfigMap .getDefaultInstance() : userWorkloadsConfigMap_; } else { return userWorkloadsConfigMapBuilder_.getMessage(); } } /** * * * <pre> * Required. User workloads ConfigMap to create. 
* </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsConfigMap user_workloads_config_map = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUserWorkloadsConfigMap( com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsConfigMap value) { if (userWorkloadsConfigMapBuilder_ == null) { if (value == null) { throw new NullPointerException(); } userWorkloadsConfigMap_ = value; } else { userWorkloadsConfigMapBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. User workloads ConfigMap to create. * </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsConfigMap user_workloads_config_map = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUserWorkloadsConfigMap( com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsConfigMap.Builder builderForValue) { if (userWorkloadsConfigMapBuilder_ == null) { userWorkloadsConfigMap_ = builderForValue.build(); } else { userWorkloadsConfigMapBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. User workloads ConfigMap to create. 
* </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsConfigMap user_workloads_config_map = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeUserWorkloadsConfigMap( com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsConfigMap value) { if (userWorkloadsConfigMapBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && userWorkloadsConfigMap_ != null && userWorkloadsConfigMap_ != com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsConfigMap .getDefaultInstance()) { getUserWorkloadsConfigMapBuilder().mergeFrom(value); } else { userWorkloadsConfigMap_ = value; } } else { userWorkloadsConfigMapBuilder_.mergeFrom(value); } if (userWorkloadsConfigMap_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. User workloads ConfigMap to create. * </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsConfigMap user_workloads_config_map = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearUserWorkloadsConfigMap() { bitField0_ = (bitField0_ & ~0x00000002); userWorkloadsConfigMap_ = null; if (userWorkloadsConfigMapBuilder_ != null) { userWorkloadsConfigMapBuilder_.dispose(); userWorkloadsConfigMapBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. User workloads ConfigMap to create. * </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsConfigMap user_workloads_config_map = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsConfigMap.Builder getUserWorkloadsConfigMapBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUserWorkloadsConfigMapFieldBuilder().getBuilder(); } /** * * * <pre> * Required. User workloads ConfigMap to create. 
* </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsConfigMap user_workloads_config_map = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsConfigMapOrBuilder getUserWorkloadsConfigMapOrBuilder() { if (userWorkloadsConfigMapBuilder_ != null) { return userWorkloadsConfigMapBuilder_.getMessageOrBuilder(); } else { return userWorkloadsConfigMap_ == null ? com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsConfigMap .getDefaultInstance() : userWorkloadsConfigMap_; } } /** * * * <pre> * Required. User workloads ConfigMap to create. * </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsConfigMap user_workloads_config_map = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsConfigMap, com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsConfigMap.Builder, com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsConfigMapOrBuilder> getUserWorkloadsConfigMapFieldBuilder() { if (userWorkloadsConfigMapBuilder_ == null) { userWorkloadsConfigMapBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsConfigMap, com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsConfigMap .Builder, com.google.cloud.orchestration.airflow.service.v1beta1 .UserWorkloadsConfigMapOrBuilder>( getUserWorkloadsConfigMap(), getParentForChildren(), isClean()); userWorkloadsConfigMap_ = null; } return userWorkloadsConfigMapBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet 
unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsConfigMapRequest) } // @@protoc_insertion_point(class_scope:google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsConfigMapRequest) private static final com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsConfigMapRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsConfigMapRequest(); } public static com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsConfigMapRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CreateUserWorkloadsConfigMapRequest> PARSER = new com.google.protobuf.AbstractParser<CreateUserWorkloadsConfigMapRequest>() { @java.lang.Override public CreateUserWorkloadsConfigMapRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CreateUserWorkloadsConfigMapRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateUserWorkloadsConfigMapRequest> getParserForType() { return PARSER; } @java.lang.Override public 
com.google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsConfigMapRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/polygene-java
36,678
libraries/alarm/src/test/java/org/apache/polygene/library/alarm/ExtendedAlarmModelTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * */ package org.apache.polygene.library.alarm; import java.time.Instant; import java.util.List; import java.util.Locale; import org.apache.polygene.api.identity.Identity; import org.junit.Test; import org.apache.polygene.api.entity.EntityBuilder; import org.apache.polygene.api.mixin.Mixins; import org.apache.polygene.api.service.ServiceComposite; import org.apache.polygene.api.unitofwork.UnitOfWork; import org.apache.polygene.api.value.ValueBuilder; import org.apache.polygene.bootstrap.AssemblyException; import org.apache.polygene.bootstrap.ModuleAssembly; import org.apache.polygene.test.AbstractPolygeneTest; import org.apache.polygene.test.EntityTestAssembler; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; public class ExtendedAlarmModelTest extends AbstractPolygeneTest { @SuppressWarnings( { "unchecked" } ) @Override public void assemble( ModuleAssembly module ) throws AssemblyException { module.services( TestAlarmModel.class ); module.services( AlarmSystemService.class ); new EntityTestAssembler().assemble( module ); 
module.entities( AlarmPointEntity.class ); module.forMixin( AlarmHistory.class ).declareDefaults().maxSize().set( 10 ); module.values( AlarmEvent.class ); module.values( AlarmCategory.class ); module.values( AlarmStatus.class ); } @Mixins( ExtendedAlarmModelService.ExtendedAlarmModelMixin.class ) public interface TestAlarmModel extends AlarmModel, ServiceComposite { } @Override public void setUp() throws Exception { super.setUp(); unitOfWorkFactory.newUnitOfWork(); } @Override public void tearDown() throws Exception { UnitOfWork uow = unitOfWorkFactory.currentUnitOfWork(); if( uow != null ) { uow.discard(); } super.tearDown(); } @Test public void testDescription() throws Exception { AlarmModel provider = serviceFinder.findService( AlarmModel.class ).get(); boolean test1 = provider.modelDescription().toLowerCase().contains( "normal" ); boolean test2 = provider.modelDescription().toLowerCase().contains( "activated" ); boolean test3 = provider.modelDescription().toLowerCase().contains( "deactivated" ); boolean test4 = provider.modelDescription().toLowerCase().contains( "acknowledged" ); boolean test5 = provider.modelDescription().toLowerCase().contains( "activation" ); boolean test6 = provider.modelDescription().toLowerCase().contains( "deactivation" ); boolean test7 = provider.modelDescription().toLowerCase().contains( "acknowledge" ); boolean test8 = provider.modelDescription().toLowerCase().contains( "block" ); boolean test9 = provider.modelDescription().toLowerCase().contains( "unblock" ); boolean test10 = provider.modelDescription().toLowerCase().contains( "disable" ); boolean test11 = provider.modelDescription().toLowerCase().contains( "enable" ); assertTrue( test1 && test2 && test3 && test4 && test5 && test6 && test7 && test8 && test9 && test10 && test11 ); Locale english = new Locale( "en" ); test1 = provider.modelDescription( english ).toLowerCase().contains( "normal" ); test2 = provider.modelDescription( english ).toLowerCase().contains( "activated" ); test3 
= provider.modelDescription( english ).toLowerCase().contains( "deactivated" ); test4 = provider.modelDescription( english ).toLowerCase().contains( "acknowledged" ); test5 = provider.modelDescription( english ).toLowerCase().contains( "activation" ); test6 = provider.modelDescription( english ).toLowerCase().contains( "deactivation" ); test7 = provider.modelDescription( english ).toLowerCase().contains( "acknowledge" ); test8 = provider.modelDescription( english ).toLowerCase().contains( "block" ); test9 = provider.modelDescription( english ).toLowerCase().contains( "unblock" ); test10 = provider.modelDescription( english ).toLowerCase().contains( "disable" ); test11 = provider.modelDescription( english ).toLowerCase().contains( "enable" ); assertTrue( test1 && test2 && test3 && test4 && test5 && test6 && test7 && test8 && test9 && test10 && test11 ); } @Test public void testTriggers() throws Exception { AlarmModel provider = serviceFinder.findService( AlarmModel.class ).get(); AlarmPoint underTest = createAlarm( "Test AlarmPoint" ); List<String> triggers = provider.alarmTriggers(); assertEquals( 7, triggers.size() ); int result = 0; for( String trigger : triggers ) { if( AlarmPoint.TRIGGER_ACTIVATE.equals( trigger ) ) { result |= 1; } if( AlarmPoint.TRIGGER_DEACTIVATE.equals( trigger ) ) { result |= 2; } if( AlarmPoint.TRIGGER_ACKNOWLEDGE.equals( trigger ) ) { result |= 4; } if( AlarmPoint.TRIGGER_BLOCK.equals( trigger ) ) { result |= 8; } if( AlarmPoint.TRIGGER_UNBLOCK.equals( trigger ) ) { result |= 16; } if( AlarmPoint.TRIGGER_DISABLE.equals( trigger ) ) { result |= 32; } if( AlarmPoint.TRIGGER_ENABLE.equals( trigger ) ) { result |= 64; } } assertEquals( 127, result ); assertEquals( AlarmPoint.STATUS_NORMAL, underTest.currentStatus().name( null ) ); } @Test public void testStateChangeFromNormal() throws Exception { AlarmModel provider = serviceFinder.findService( AlarmModel.class ).get(); AlarmPoint alarm = createAlarm( "Another 1" ); AlarmEvent event1 = 
provider.evaluate( alarm, AlarmPoint.TRIGGER_ACTIVATE ); assertEquals( AlarmPoint.EVENT_ACTIVATION, event1.systemName().get() ); alarm = createAlarm( "Another 2" ); AlarmEvent event2 = provider.evaluate( alarm, AlarmPoint.TRIGGER_DEACTIVATE ); assertNull( event2 ); alarm = createAlarm( "Another 3" ); AlarmEvent event3 = provider.evaluate( alarm, AlarmPoint.TRIGGER_ACKNOWLEDGE ); assertNull( event3 ); alarm = createAlarm( "Another 4" ); AlarmEvent event4 = provider.evaluate( alarm, AlarmPoint.TRIGGER_BLOCK ); assertEquals( AlarmPoint.EVENT_BLOCKING, event4.systemName().get() ); alarm = createAlarm( "Another 5" ); AlarmEvent event5 = provider.evaluate( alarm, AlarmPoint.TRIGGER_UNBLOCK ); assertNull( event5 ); alarm = createAlarm( "Another 6" ); AlarmEvent event6 = provider.evaluate( alarm, AlarmPoint.TRIGGER_DISABLE ); assertEquals( AlarmPoint.EVENT_DISABLING, event6.systemName().get() ); alarm = createAlarm( "Another 7" ); AlarmEvent event7 = provider.evaluate( alarm, AlarmPoint.TRIGGER_ENABLE ); assertNull( event7 ); } @Test public void testStateChangeFromActivated() throws Exception { AlarmModel provider = serviceFinder.findService( AlarmModel.class ).get(); AlarmPoint alarm = createAlarm( "Another 1" ); alarm.activate(); AlarmEvent event1 = provider.evaluate( alarm, AlarmPoint.TRIGGER_ACTIVATE ); assertNull( event1 ); alarm = createAlarm( "Another 2" ); alarm.activate(); AlarmEvent event2 = provider.evaluate( alarm, AlarmPoint.TRIGGER_DEACTIVATE ); assertEquals( AlarmPoint.EVENT_DEACTIVATION, event2.systemName().get() ); alarm = createAlarm( "Another 3" ); alarm.activate(); AlarmEvent event3 = provider.evaluate( alarm, AlarmPoint.TRIGGER_ACKNOWLEDGE ); assertEquals( AlarmPoint.EVENT_ACKNOWLEDGEMENT, event3.systemName().get() ); alarm = createAlarm( "Another 4" ); alarm.activate(); AlarmEvent event4 = provider.evaluate( alarm, AlarmPoint.TRIGGER_BLOCK ); assertEquals( AlarmPoint.EVENT_BLOCKING, event4.systemName().get() ); alarm = createAlarm( "Another 5" ); 
alarm.activate(); AlarmEvent event5 = provider.evaluate( alarm, AlarmPoint.TRIGGER_UNBLOCK ); assertNull( event5 ); alarm = createAlarm( "Another 6" ); alarm.activate(); AlarmEvent event6 = provider.evaluate( alarm, AlarmPoint.TRIGGER_DISABLE ); assertEquals( AlarmPoint.EVENT_DISABLING, event6.systemName().get() ); alarm = createAlarm( "Another 7" ); alarm.activate(); AlarmEvent event7 = provider.evaluate( alarm, AlarmPoint.TRIGGER_ENABLE ); assertNull( event7 ); } @Test public void testStateChangeFromAcknowledged() throws Exception { AlarmModel provider = serviceFinder.findService( AlarmModel.class ).get(); AlarmPoint alarm = createAlarm( "Another 1" ); alarm.activate(); alarm.acknowledge(); AlarmEvent event1 = provider.evaluate( alarm, AlarmPoint.TRIGGER_ACTIVATE ); assertNull( event1 ); alarm = createAlarm( "Another 2" ); alarm.activate(); alarm.acknowledge(); AlarmEvent event2 = provider.evaluate( alarm, AlarmPoint.TRIGGER_DEACTIVATE ); assertEquals( AlarmPoint.EVENT_DEACTIVATION, event2.systemName().get() ); alarm = createAlarm( "Another 3" ); alarm.activate(); alarm.acknowledge(); AlarmEvent event3 = provider.evaluate( alarm, AlarmPoint.TRIGGER_ACKNOWLEDGE ); assertNull( event3 ); alarm = createAlarm( "Another 4" ); alarm.activate(); alarm.acknowledge(); AlarmEvent event4 = provider.evaluate( alarm, AlarmPoint.TRIGGER_BLOCK ); assertEquals( AlarmPoint.EVENT_BLOCKING, event4.systemName().get() ); alarm = createAlarm( "Another 5" ); alarm.activate(); alarm.acknowledge(); AlarmEvent event5 = provider.evaluate( alarm, AlarmPoint.TRIGGER_UNBLOCK ); assertNull( event5 ); alarm = createAlarm( "Another 6" ); alarm.activate(); alarm.acknowledge(); AlarmEvent event6 = provider.evaluate( alarm, AlarmPoint.TRIGGER_DISABLE ); assertEquals( AlarmPoint.EVENT_DISABLING, event6.systemName().get() ); alarm = createAlarm( "Another 7" ); alarm.activate(); alarm.acknowledge(); AlarmEvent event7 = provider.evaluate( alarm, AlarmPoint.TRIGGER_ENABLE ); assertNull( event7 ); } @Test 
public void testStateChangeFromDeactivated() throws Exception { AlarmModel provider = serviceFinder.findService( AlarmModel.class ).get(); AlarmPoint alarm = createAlarm( "Another 1" ); alarm.activate(); alarm.deactivate(); AlarmEvent event1 = provider.evaluate( alarm, AlarmPoint.TRIGGER_ACTIVATE ); assertEquals( AlarmPoint.EVENT_ACTIVATION, event1.systemName().get() ); alarm = createAlarm( "Another 2" ); alarm.activate(); alarm.deactivate(); AlarmEvent event2 = provider.evaluate( alarm, AlarmPoint.TRIGGER_DEACTIVATE ); assertNull( event2 ); alarm = createAlarm( "Another 3" ); alarm.activate(); alarm.deactivate(); AlarmEvent event3 = provider.evaluate( alarm, AlarmPoint.TRIGGER_ACKNOWLEDGE ); assertEquals( AlarmPoint.EVENT_ACKNOWLEDGEMENT, event3.systemName().get() ); alarm = createAlarm( "Another 4" ); alarm.activate(); alarm.deactivate(); AlarmEvent event4 = provider.evaluate( alarm, AlarmPoint.TRIGGER_BLOCK ); assertEquals( AlarmPoint.EVENT_BLOCKING, event4.systemName().get() ); alarm = createAlarm( "Another 5" ); alarm.activate(); alarm.deactivate(); AlarmEvent event5 = provider.evaluate( alarm, AlarmPoint.TRIGGER_UNBLOCK ); assertNull( event5 ); alarm = createAlarm( "Another 6" ); alarm.activate(); alarm.deactivate(); AlarmEvent event6 = provider.evaluate( alarm, AlarmPoint.TRIGGER_DISABLE ); assertEquals( AlarmPoint.EVENT_DISABLING, event6.systemName().get() ); alarm = createAlarm( "Another 7" ); alarm.activate(); alarm.deactivate(); AlarmEvent event7 = provider.evaluate( alarm, AlarmPoint.TRIGGER_ENABLE ); assertNull( event7 ); } @Test public void testStateChangeFromBlocked() throws Exception { AlarmModel provider = serviceFinder.findService( AlarmModel.class ).get(); AlarmPoint alarm = createAlarm( "Another 1" ); alarm.activate(); alarm.trigger( "block" ); AlarmEvent event1 = provider.evaluate( alarm, AlarmPoint.TRIGGER_ACTIVATE ); assertNull( event1 ); alarm = createAlarm( "Another 2" ); alarm.activate(); alarm.trigger( "block" ); AlarmEvent event2 = 
provider.evaluate( alarm, AlarmPoint.TRIGGER_DEACTIVATE ); assertNull( event2 ); alarm = createAlarm( "Another 3" ); alarm.activate(); alarm.trigger( "block" ); AlarmEvent event3 = provider.evaluate( alarm, AlarmPoint.TRIGGER_ACKNOWLEDGE ); assertNull( event3 ); alarm = createAlarm( "Another 4" ); alarm.activate(); alarm.trigger( "block" ); AlarmEvent event4 = provider.evaluate( alarm, AlarmPoint.TRIGGER_BLOCK ); assertNull( event4 ); alarm = createAlarm( "Another 5" ); alarm.activate(); alarm.trigger( "block" ); AlarmEvent event5 = provider.evaluate( alarm, AlarmPoint.TRIGGER_UNBLOCK ); assertEquals( AlarmPoint.EVENT_UNBLOCKING, event5.systemName().get() ); alarm = createAlarm( "Another 6" ); alarm.activate(); alarm.trigger( "block" ); AlarmEvent event6 = provider.evaluate( alarm, AlarmPoint.TRIGGER_DISABLE ); assertEquals( AlarmPoint.EVENT_DISABLING, event6.systemName().get() ); alarm = createAlarm( "Another 7" ); alarm.activate(); alarm.trigger( "block" ); AlarmEvent event7 = provider.evaluate( alarm, AlarmPoint.TRIGGER_ENABLE ); assertNull( event7 ); } @Test public void testStateChangeFromDisabled() throws Exception { AlarmModel provider = serviceFinder.findService( AlarmModel.class ).get(); AlarmPoint alarm = createAlarm( "Another 1" ); alarm.activate(); alarm.trigger( "disable" ); AlarmEvent event1 = provider.evaluate( alarm, AlarmPoint.TRIGGER_ACTIVATE ); assertNull( event1 ); alarm = createAlarm( "Another 2" ); alarm.activate(); alarm.trigger( "disable" ); AlarmEvent event2 = provider.evaluate( alarm, AlarmPoint.TRIGGER_DEACTIVATE ); assertNull( event2 ); alarm = createAlarm( "Another 3" ); alarm.activate(); alarm.trigger( "disable" ); AlarmEvent event3 = provider.evaluate( alarm, AlarmPoint.TRIGGER_ACKNOWLEDGE ); assertNull( event3 ); alarm = createAlarm( "Another 4" ); alarm.activate(); alarm.trigger( "disable" ); AlarmEvent event4 = provider.evaluate( alarm, AlarmPoint.TRIGGER_BLOCK ); assertNull( event4 ); alarm = createAlarm( "Another 5" ); 
alarm.activate(); alarm.trigger( "disable" ); AlarmEvent event5 = provider.evaluate( alarm, AlarmPoint.TRIGGER_UNBLOCK ); assertNull( event5 ); alarm = createAlarm( "Another 6" ); alarm.activate(); alarm.trigger( "disable" ); AlarmEvent event6 = provider.evaluate( alarm, AlarmPoint.TRIGGER_DISABLE ); assertNull( event6 ); alarm = createAlarm( "Another 7" ); alarm.activate(); alarm.trigger( "disable" ); AlarmEvent event7 = provider.evaluate( alarm, AlarmPoint.TRIGGER_ENABLE ); assertEquals( AlarmPoint.EVENT_ENABLING, event7.systemName().get() ); } @Test public void testIllegalTrigger() throws Exception { try { AlarmModel provider = serviceFinder.findService( AlarmModel.class ).get(); AlarmPoint underTest = createAlarm( "Test AlarmPoint" ); provider.evaluate( underTest, "my-trigger" ); fail( "IllegalArgumentException not thrown." ); } catch( IllegalArgumentException e ) { // Expected. } } @Test public void testNormalToActivated() throws Exception { AlarmPoint underTest = createAlarm( "Test AlarmPoint" ); underTest.activate(); AlarmEvent event = underTest.history().lastEvent(); AlarmStatus oldstate = event.oldStatus().get(); assertEquals( AlarmPoint.STATUS_NORMAL, oldstate.name( null ) ); AlarmStatus newstate = event.newStatus().get(); assertEquals( AlarmPoint.STATUS_ACTIVATED, newstate.name( null ) ); AlarmPoint eventalarm = getAlarm( event.identity().get() ); assertEquals( underTest, eventalarm ); } @Test public void testActivatedToDeactivated() throws Exception { AlarmPoint underTest = createAlarm( "Test AlarmPoint" ); underTest.activate(); underTest.deactivate(); AlarmEvent event = underTest.history().lastEvent(); AlarmStatus oldstate = event.oldStatus().get(); assertEquals( AlarmPoint.STATUS_ACTIVATED, oldstate.name( null ) ); AlarmStatus newstate = event.newStatus().get(); assertEquals( AlarmPoint.STATUS_DEACTIVATED, newstate.name( null ) ); AlarmPoint eventalarm = getAlarm( event.identity().get() ); assertEquals( underTest, eventalarm ); } @Test public void 
testActivatedToAcknowledged() throws Exception { AlarmPoint underTest = createAlarm( "Test AlarmPoint" ); underTest.activate(); underTest.acknowledge(); AlarmEvent event = underTest.history().lastEvent(); AlarmStatus oldstate = event.oldStatus().get(); assertEquals( AlarmPoint.STATUS_ACTIVATED, oldstate.name( null ) ); AlarmStatus newstate = event.newStatus().get(); assertEquals( AlarmPoint.STATUS_ACKNOWLEDGED, newstate.name( null ) ); AlarmPoint eventalarm = getAlarm( event.identity().get() ); assertEquals( underTest, eventalarm ); } @Test public void testDeactivatedToNormal() throws Exception { AlarmPoint underTest = createAlarm( "Test AlarmPoint" ); underTest.activate(); underTest.deactivate(); underTest.acknowledge(); AlarmEvent event = underTest.history().lastEvent(); AlarmStatus oldstate = event.oldStatus().get(); assertEquals( AlarmPoint.STATUS_DEACTIVATED, oldstate.name( null ) ); AlarmStatus newstate = event.newStatus().get(); assertEquals( AlarmPoint.STATUS_NORMAL, newstate.name( null ) ); AlarmPoint eventalarm = getAlarm( event.identity().get() ); assertEquals( underTest, eventalarm ); } @Test public void testAcknowledgedToNormal() throws Exception { AlarmPoint underTest = createAlarm( "Test AlarmPoint" ); underTest.activate(); underTest.acknowledge(); underTest.deactivate(); AlarmEvent event = underTest.history().lastEvent(); AlarmStatus oldstate = event.oldStatus().get(); assertEquals( AlarmPoint.STATUS_ACKNOWLEDGED, oldstate.name( null ) ); AlarmStatus newstate = event.newStatus().get(); assertEquals( AlarmPoint.STATUS_NORMAL, newstate.name( null ) ); AlarmPoint eventalarm = getAlarm( event.identity().get() ); assertEquals( underTest, eventalarm ); } @Test public void testDisabledToNormal() throws Exception { AlarmPoint underTest = createAlarm( "Test AlarmPoint" ); underTest.activate(); underTest.trigger( "disable" ); underTest.trigger( "enable" ); AlarmEvent event = underTest.history().lastEvent(); AlarmStatus oldstate = event.oldStatus().get(); 
assertEquals( AlarmPoint.STATUS_DISABLED, oldstate.name( null ) ); AlarmStatus newstate = event.newStatus().get(); assertEquals( AlarmPoint.STATUS_NORMAL, newstate.name( null ) ); AlarmPoint eventalarm = getAlarm( event.identity().get() ); assertEquals( underTest, eventalarm ); } @Test public void testBlockedToNormal() throws Exception { AlarmPoint underTest = createAlarm( "Test AlarmPoint" ); underTest.activate(); underTest.trigger( "block" ); underTest.trigger( "unblock" ); AlarmEvent event = underTest.history().lastEvent(); AlarmStatus oldstate = event.oldStatus().get(); assertEquals( AlarmPoint.STATUS_BLOCKED, oldstate.name( null ) ); AlarmStatus newstate = event.newStatus().get(); assertEquals( AlarmPoint.STATUS_NORMAL, newstate.name( null ) ); AlarmPoint eventalarm = getAlarm( event.identity().get() ); assertEquals( underTest, eventalarm ); } @Test public void testNormalToBlocked() throws Exception { AlarmPoint underTest = createAlarm( "Test AlarmPoint" ); underTest.trigger( "block" ); AlarmEvent event = underTest.history().lastEvent(); AlarmStatus oldstate = event.oldStatus().get(); assertEquals( AlarmPoint.STATUS_NORMAL, oldstate.name( null ) ); AlarmStatus newstate = event.newStatus().get(); assertEquals( AlarmPoint.STATUS_BLOCKED, newstate.name( null ) ); AlarmPoint eventalarm = getAlarm( event.identity().get() ); assertEquals( underTest, eventalarm ); } @Test public void testActivatedToBlocked() throws Exception { AlarmPoint underTest = createAlarm( "Test AlarmPoint" ); underTest.activate(); underTest.trigger( "block" ); AlarmEvent event = underTest.history().lastEvent(); AlarmStatus oldstate = event.oldStatus().get(); assertEquals( AlarmPoint.STATUS_ACTIVATED, oldstate.name( null ) ); AlarmStatus newstate = event.newStatus().get(); assertEquals( AlarmPoint.STATUS_BLOCKED, newstate.name( null ) ); AlarmPoint eventalarm = getAlarm( event.identity().get() ); assertEquals( underTest, eventalarm ); } @Test public void testDeactivatedToBlocked() throws 
Exception { AlarmPoint underTest = createAlarm( "Test AlarmPoint" ); underTest.activate(); underTest.deactivate(); underTest.trigger( "block" ); AlarmEvent event = underTest.history().lastEvent(); AlarmStatus oldstate = event.oldStatus().get(); assertEquals( AlarmPoint.STATUS_DEACTIVATED, oldstate.name( null ) ); AlarmStatus newstate = event.newStatus().get(); assertEquals( AlarmPoint.STATUS_BLOCKED, newstate.name( null ) ); AlarmPoint eventalarm = getAlarm( event.identity().get() ); assertEquals( underTest, eventalarm ); } @Test public void testAcknowledgedToBlocked() throws Exception { AlarmPoint underTest = createAlarm( "Test AlarmPoint" ); underTest.activate(); underTest.acknowledge(); underTest.trigger( "block" ); AlarmEvent event = underTest.history().lastEvent(); AlarmStatus oldstate = event.oldStatus().get(); assertEquals( AlarmPoint.STATUS_ACKNOWLEDGED, oldstate.name( null ) ); AlarmStatus newstate = event.newStatus().get(); assertEquals( AlarmPoint.STATUS_BLOCKED, newstate.name( null ) ); AlarmPoint eventalarm = getAlarm( event.identity().get() ); assertEquals( underTest, eventalarm ); } @Test public void testNormalToDisabled() throws Exception { AlarmPoint underTest = createAlarm( "Test AlarmPoint" ); underTest.trigger( "disable" ); AlarmEvent event = underTest.history().lastEvent(); AlarmStatus oldstate = event.oldStatus().get(); assertEquals( AlarmPoint.STATUS_NORMAL, oldstate.name( null ) ); AlarmStatus newstate = event.newStatus().get(); assertEquals( AlarmPoint.STATUS_DISABLED, newstate.name( null ) ); AlarmPoint eventalarm = getAlarm( event.identity().get() ); assertEquals( underTest, eventalarm ); } @Test public void testActivatedToDisabled() throws Exception { AlarmPoint underTest = createAlarm( "Test AlarmPoint" ); underTest.activate(); underTest.trigger( "disable" ); AlarmEvent event = underTest.history().lastEvent(); AlarmStatus oldstate = event.oldStatus().get(); assertEquals( AlarmPoint.STATUS_ACTIVATED, oldstate.name( null ) ); AlarmStatus 
newstate = event.newStatus().get(); assertEquals( AlarmPoint.STATUS_DISABLED, newstate.name( null ) ); AlarmPoint eventalarm = getAlarm( event.identity().get() ); assertEquals( underTest, eventalarm ); } @Test public void testDeactivatedToDisabled() throws Exception { AlarmPoint underTest = createAlarm( "Test AlarmPoint" ); underTest.activate(); underTest.deactivate(); underTest.trigger( "disable" ); AlarmEvent event = underTest.history().lastEvent(); AlarmStatus oldstate = event.oldStatus().get(); assertEquals( AlarmPoint.STATUS_DEACTIVATED, oldstate.name( null ) ); AlarmStatus newstate = event.newStatus().get(); assertEquals( AlarmPoint.STATUS_DISABLED, newstate.name( null ) ); AlarmPoint eventalarm = getAlarm( event.identity().get() ); assertEquals( underTest, eventalarm ); } @Test public void testAcknowledgedToDisabled() throws Exception { AlarmPoint underTest = createAlarm( "Test AlarmPoint" ); underTest.activate(); underTest.acknowledge(); underTest.trigger( "disable" ); AlarmEvent event = underTest.history().lastEvent(); AlarmStatus oldstate = event.oldStatus().get(); assertEquals( AlarmPoint.STATUS_ACKNOWLEDGED, oldstate.name( null ) ); AlarmStatus newstate = event.newStatus().get(); assertEquals( AlarmPoint.STATUS_DISABLED, newstate.name( null ) ); AlarmPoint eventalarm = getAlarm( event.identity().get() ); assertEquals( underTest, eventalarm ); } @Test public void testBlockedToDisabled() throws Exception { AlarmPoint underTest = createAlarm( "Test AlarmPoint" ); underTest.activate(); underTest.trigger( "block" ); underTest.trigger( "disable" ); AlarmEvent event = underTest.history().lastEvent(); AlarmStatus oldstate = event.oldStatus().get(); assertEquals( AlarmPoint.STATUS_BLOCKED, oldstate.name( null ) ); AlarmStatus newstate = event.newStatus().get(); assertEquals( AlarmPoint.STATUS_DISABLED, newstate.name( null ) ); AlarmPoint eventalarm = getAlarm( event.identity().get() ); assertEquals( underTest, eventalarm ); } @Test public void 
testDisabledToBlocked() throws Exception { AlarmPoint underTest = createAlarm( "Test AlarmPoint" ); underTest.activate(); underTest.trigger( "disable" ); underTest.trigger( "block" ); // This trigger should be ignored. AlarmEvent event = underTest.history().lastEvent(); AlarmStatus oldstate = event.oldStatus().get(); assertEquals( AlarmPoint.STATUS_ACTIVATED, oldstate.name( null ) ); AlarmStatus newstate = event.newStatus().get(); assertEquals( AlarmPoint.STATUS_DISABLED, newstate.name( null ) ); AlarmPoint eventalarm = getAlarm( event.identity().get() ); assertEquals( underTest, eventalarm ); } @Test public void testConditionChanges1() throws Exception { AlarmPoint underTest = createAlarm( "Test AlarmPoint" ); underTest.updateCondition( false ); AlarmEvent event = underTest.history().lastEvent(); assertNull( "Generated an event but should have not.", event ); } @Test public void testConditionChanges2() throws Exception { AlarmPoint underTest = createAlarm( "Test AlarmPoint" ); underTest.updateCondition( true ); AlarmEvent event = underTest.history().lastEvent(); AlarmStatus oldstate = event.oldStatus().get(); assertEquals( AlarmPoint.STATUS_NORMAL, oldstate.name( null ) ); AlarmStatus newstate = event.newStatus().get(); assertEquals( AlarmPoint.STATUS_ACTIVATED, newstate.name( null ) ); AlarmPoint eventalarm = getAlarm( event.identity().get() ); assertEquals( underTest, eventalarm ); } @Test public void testConditionChanges3() throws Exception { AlarmPoint underTest = createAlarm( "Test AlarmPoint" ); underTest.updateCondition( true ); underTest.updateCondition( false ); AlarmEvent event = underTest.history().lastEvent(); AlarmStatus oldstate = event.oldStatus().get(); assertEquals( AlarmPoint.STATUS_ACTIVATED, oldstate.name( null ) ); AlarmStatus newstate = event.newStatus().get(); assertEquals( AlarmPoint.STATUS_DEACTIVATED, newstate.name( null ) ); AlarmPoint eventalarm = getAlarm( event.identity().get() ); assertEquals( underTest, eventalarm ); } @Test public 
void testComputeCondition() throws Exception { AlarmModel provider = serviceFinder.findService( AlarmModel.class ).get(); AlarmStatus s1 = createStatus( AlarmPoint.STATUS_NORMAL ); assertFalse( provider.computeCondition( s1 ) ); AlarmStatus s2 = createStatus( AlarmPoint.STATUS_ACTIVATED ); assertTrue( provider.computeCondition( s2 ) ); AlarmStatus s3 = createStatus( AlarmPoint.STATUS_DEACTIVATED ); assertFalse( provider.computeCondition( s3 ) ); AlarmStatus s4 = createStatus( AlarmPoint.STATUS_ACKNOWLEDGED ); assertTrue( provider.computeCondition( s4 ) ); AlarmStatus s5 = createStatus( AlarmPoint.STATUS_DISABLED ); assertFalse( provider.computeCondition( s5 ) ); AlarmStatus s6 = createStatus( AlarmPoint.STATUS_BLOCKED ); assertFalse( provider.computeCondition( s6 ) ); AlarmStatus s7 = createStatus( AlarmPoint.STATUS_REACTIVATED ); assertTrue( provider.computeCondition( s7 ) ); } @Test public void testComputeTriggerNormal() throws Exception { AlarmModel provider = serviceFinder.findService( AlarmModel.class ).get(); AlarmStatus status = createStatus( AlarmPoint.STATUS_NORMAL ); String trigger1 = provider.computeTrigger( status, true ); String trigger2 = provider.computeTrigger( status, false ); assertEquals( AlarmPoint.TRIGGER_ACTIVATE, trigger1 ); assertEquals( null, trigger2 ); } @Test public void testComputeTriggerActivated() { AlarmModel provider = serviceFinder.findService( AlarmModel.class ).get(); AlarmStatus status = createStatus( AlarmPoint.STATUS_ACTIVATED ); String trigger1 = provider.computeTrigger( status, true ); String trigger2 = provider.computeTrigger( status, false ); assertEquals( null, trigger1 ); assertEquals( AlarmPoint.TRIGGER_DEACTIVATE, trigger2 ); } @Test public void testComputeTRiggerDeactivated() { AlarmModel provider = serviceFinder.findService( AlarmModel.class ).get(); AlarmStatus status = createStatus( AlarmPoint.STATUS_DEACTIVATED ); String trigger1 = provider.computeTrigger( status, true ); String trigger2 = provider.computeTrigger( 
status, false ); assertEquals( AlarmPoint.TRIGGER_ACTIVATE, trigger1 ); assertEquals( null, trigger2 ); } @Test public void testComputeTriggerAcknowledged() { AlarmModel provider = serviceFinder.findService( AlarmModel.class ).get(); AlarmStatus status = createStatus( AlarmPoint.STATUS_ACKNOWLEDGED ); String trigger1 = provider.computeTrigger( status, true ); String trigger2 = provider.computeTrigger( status, false ); assertEquals( null, trigger1 ); assertEquals( AlarmPoint.TRIGGER_DEACTIVATE, trigger2 ); } @Test public void testComputeTriggerReactivated() { AlarmModel provider = serviceFinder.findService( AlarmModel.class ).get(); AlarmStatus status = createStatus( AlarmPoint.STATUS_REACTIVATED ); String trigger1 = provider.computeTrigger( status, true ); String trigger2 = provider.computeTrigger( status, false ); assertEquals( null, trigger1 ); assertEquals( AlarmPoint.TRIGGER_DEACTIVATE, trigger2 ); } @Test public void testComputeTriggerBlocked() { AlarmModel provider = serviceFinder.findService( AlarmModel.class ).get(); AlarmStatus status = createStatus( AlarmPoint.STATUS_BLOCKED ); String trigger1 = provider.computeTrigger( status, true ); String trigger2 = provider.computeTrigger( status, false ); assertEquals( null, trigger1 ); assertEquals( null, trigger2 ); } @Test public void testComputeTriggerDisabled() { AlarmModel provider = serviceFinder.findService( AlarmModel.class ).get(); AlarmStatus status = createStatus( AlarmPoint.STATUS_DISABLED ); String trigger1 = provider.computeTrigger( status, true ); String trigger2 = provider.computeTrigger( status, false ); assertEquals( null, trigger1 ); assertEquals( null, trigger2 ); } private AlarmPoint createAlarm( String name ) { UnitOfWork uow = unitOfWorkFactory.currentUnitOfWork(); EntityBuilder<AlarmPoint> builder = uow.newEntityBuilder( AlarmPoint.class ); builder.instance().category().set( createCategory( "Testing" ) ); AlarmPoint.AlarmState state = builder.instanceFor( AlarmPoint.AlarmState.class ); 
state.currentStatus().set( createStatus( AlarmPoint.STATUS_NORMAL ) ); state.description().set( "Test Description" ); state.systemName().set( name ); return builder.newInstance(); } private AlarmCategory createCategory( String name ) { ValueBuilder<AlarmCategory> builder = valueBuilderFactory.newValueBuilder( AlarmCategory.class ); builder.prototype().name().set( name ); return builder.newInstance(); } private AlarmPoint getAlarm( Identity identity ) { UnitOfWork uow = unitOfWorkFactory.currentUnitOfWork(); return uow.get( AlarmPoint.class, identity ); } private AlarmStatus createStatus( String status ) { ValueBuilder<AlarmStatus> builder = valueBuilderFactory.newValueBuilder( AlarmStatus.class ); AlarmStatus.State statePrototype = builder.prototypeFor( AlarmStatus.State.class ); statePrototype.name().set( status ); statePrototype.creationDate().set( Instant.now() ); return builder.newInstance(); } }
apache/pulsar
36,862
pulsar-broker/src/test/java/org/apache/pulsar/client/api/ClientErrorsTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.pulsar.client.api; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertFalse; import static org.testng.Assert.assertTrue; import static org.testng.Assert.fail; import io.netty.channel.ChannelHandlerContext; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import lombok.Cleanup; import org.apache.bookkeeper.common.util.JsonUtil; import org.apache.pulsar.client.impl.ConsumerBase; import org.apache.pulsar.client.impl.PartitionedProducerImpl; import org.apache.pulsar.client.impl.ProducerBase; import org.apache.pulsar.common.api.proto.CommandLookupTopicResponse.LookupType; import org.apache.pulsar.common.api.proto.ServerError; import org.apache.pulsar.common.protocol.Commands; import org.apache.pulsar.common.protocol.schema.SchemaVersion; import org.awaitility.Awaitility; import org.testng.annotations.AfterClass; import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; @Test(groups = "broker-api") public class 
ClientErrorsTest { MockBrokerService mockBrokerService; private static final int ASYNC_EVENT_COMPLETION_WAIT = 100; private static final String ASSERTION_ERROR = "AssertionError"; @BeforeClass(alwaysRun = true) public void setup() { mockBrokerService = new MockBrokerService(); mockBrokerService.start(); } @AfterClass(alwaysRun = true) public void teardown() { if (mockBrokerService != null) { mockBrokerService.stop(); } } @Test public void testMockBrokerService() throws PulsarClientException { // test default actions of mock broker service @Cleanup PulsarClient client = PulsarClient.builder().serviceUrl(mockBrokerService.getBrokerAddress()).build(); try { Consumer<byte[]> consumer = client.newConsumer().topic("persistent://prop/use/ns/t1") .subscriptionName("sub1").subscribe(); Producer<byte[]> producer = client.newProducer().topic("persistent://prop/use/ns/t1").create(); Thread.sleep(ASYNC_EVENT_COMPLETION_WAIT); producer.send("message".getBytes()); Thread.sleep(ASYNC_EVENT_COMPLETION_WAIT); consumer.unsubscribe(); producer.close(); consumer.close(); } catch (Exception e) { fail("None of the mocked operations should throw a client side exception"); } } @Test public void testProducerCreateFailWithoutRetry() throws Exception { producerCreateFailWithoutRetry("persistent://prop/use/ns/t1"); } @Test public void testPartitionedProducerCreateFailWithoutRetry() throws Exception { producerCreateFailWithoutRetry("persistent://prop/use/ns/part-t1"); } private void producerCreateFailWithoutRetry(String topic) throws Exception { @Cleanup PulsarClient client = PulsarClient.builder().serviceUrl(mockBrokerService.getBrokerAddress()).build(); final AtomicInteger counter = new AtomicInteger(0); mockBrokerService.setHandleProducer((ctx, producer) -> { if (counter.incrementAndGet() == 2) { // piggyback unknown error to relay assertion failure ctx.writeAndFlush( Commands.newError(producer.getRequestId(), ServerError.UnknownError, ASSERTION_ERROR)); return; } 
ctx.writeAndFlush(Commands.newError(producer.getRequestId(), ServerError.AuthorizationError, "msg")); }); try { client.newProducer().topic(topic).create(); } catch (Exception e) { if (e.getMessage().equals(ASSERTION_ERROR)) { fail("Producer create should not retry on auth error"); } assertTrue(e instanceof PulsarClientException.AuthorizationException); } mockBrokerService.resetHandleProducer(); } @Test public void testProducerCreateSuccessAfterRetry() throws Exception { producerCreateSuccessAfterRetry("persistent://prop/use/ns/t1"); } @Test public void testPartitionedProducerCreateSuccessAfterRetry() throws Exception { producerCreateSuccessAfterRetry("persistent://prop/use/ns/part-t1"); } private void producerCreateSuccessAfterRetry(String topic) throws Exception { @Cleanup PulsarClient client = PulsarClient.builder().serviceUrl(mockBrokerService.getBrokerAddress()).build(); final AtomicInteger counter = new AtomicInteger(0); mockBrokerService.setHandleProducer((ctx, producer) -> { if (counter.incrementAndGet() == 2) { ctx.writeAndFlush(Commands.newProducerSuccess(producer.getRequestId(), "default-producer", SchemaVersion.Empty)); return; } ctx.writeAndFlush(Commands.newError(producer.getRequestId(), ServerError.ServiceNotReady, "msg")); }); try { client.newProducer().topic(topic).create(); } catch (Exception e) { fail("Should not fail"); } mockBrokerService.resetHandleProducer(); } @Test public void testProducerCreateFailAfterRetryTimeout() throws Exception { producerCreateFailAfterRetryTimeout("persistent://prop/use/ns/t1"); } @Test public void testPartitionedProducerCreateFailAfterRetryTimeout() throws Exception { producerCreateFailAfterRetryTimeout("persistent://prop/use/ns/part-t1"); } private void producerCreateFailAfterRetryTimeout(String topic) throws Exception { @Cleanup PulsarClient client = PulsarClient.builder().serviceUrl(mockBrokerService.getBrokerAddress()) .operationTimeout(1, TimeUnit.SECONDS).build(); final AtomicInteger counter = new 
AtomicInteger(0); final AtomicInteger closeProducerCounter = new AtomicInteger(0); mockBrokerService.setHandleProducer((ctx, producer) -> { if (counter.incrementAndGet() == 2) { try { Thread.sleep(2000); } catch (InterruptedException e) { // do nothing } } ctx.writeAndFlush(Commands.newError(producer.getRequestId(), ServerError.ServiceNotReady, "msg")); }); mockBrokerService.setHandleCloseProducer((ctx, closeProducer) -> { closeProducerCounter.incrementAndGet(); }); try { client.newProducer().topic(topic).create(); fail("Should have failed"); } catch (Exception e) { // we fail even on the retriable error assertTrue(e instanceof PulsarClientException); } // There is a small race condition here because the producer's timeout both fails the client creation // and triggers sending CloseProducer. Awaitility.await().until(() -> closeProducerCounter.get() == 1); mockBrokerService.resetHandleProducer(); mockBrokerService.resetHandleCloseProducer(); } @Test public void testCreatedProducerSendsCloseProducerAfterTimeout() throws Exception { producerCreatedThenFailsRetryTimeout("persistent://prop/use/ns/t1"); } @Test public void testCreatedPartitionedProducerSendsCloseProducerAfterTimeout() throws Exception { producerCreatedThenFailsRetryTimeout("persistent://prop/use/ns/part-t1"); } private void producerCreatedThenFailsRetryTimeout(String topic) throws Exception { @Cleanup PulsarClient client = PulsarClient.builder().serviceUrl(mockBrokerService.getBrokerAddress()) .operationTimeout(1, TimeUnit.SECONDS).build(); final AtomicInteger producerCounter = new AtomicInteger(0); final AtomicInteger closeProducerCounter = new AtomicInteger(0); mockBrokerService.setHandleProducer((ctx, producer) -> { int producerCount = producerCounter.incrementAndGet(); if (producerCount == 1) { ctx.writeAndFlush(Commands.newProducerSuccess(producer.getRequestId(), "producer1", SchemaVersion.Empty)); // Trigger reconnect ctx.writeAndFlush(Commands.newCloseProducer(producer.getProducerId(), -1)); } 
else if (producerCount != 2) { // Respond to subsequent requests to prevent timeouts ctx.writeAndFlush(Commands.newProducerSuccess(producer.getRequestId(), "producer1", SchemaVersion.Empty)); } // Don't respond to the second Producer command to ensure timeout }); mockBrokerService.setHandleCloseProducer((ctx, closeProducer) -> { closeProducerCounter.incrementAndGet(); ctx.writeAndFlush(Commands.newSuccess(closeProducer.getRequestId())); }); // Create producer should succeed then upon closure, it should reattempt creation. The first request will // time out, which triggers CloseProducer. The client might send the third Producer command before the // below assertion, so we pass with 2 or 3. client.newProducer().topic(topic).create(); Awaitility.await().until(() -> closeProducerCounter.get() == 1); Awaitility.await().until(() -> producerCounter.get() == 2 || producerCounter.get() == 3); mockBrokerService.resetHandleProducer(); mockBrokerService.resetHandleCloseProducer(); } @Test public void testCreatedConsumerSendsCloseConsumerAfterTimeout() throws Exception { consumerCreatedThenFailsRetryTimeout("persistent://prop/use/ns/t1"); } @Test public void testCreatedPartitionedConsumerSendsCloseConsumerAfterTimeout() throws Exception { consumerCreatedThenFailsRetryTimeout("persistent://prop/use/ns/part-t1"); } private void consumerCreatedThenFailsRetryTimeout(String topic) throws Exception { @Cleanup PulsarClient client = PulsarClient.builder().serviceUrl(mockBrokerService.getBrokerAddress()) .operationTimeout(1, TimeUnit.SECONDS).build(); final AtomicInteger subscribeCounter = new AtomicInteger(0); final AtomicInteger closeConsumerCounter = new AtomicInteger(0); mockBrokerService.setHandleSubscribe((ctx, subscribe) -> { int subscribeCount = subscribeCounter.incrementAndGet(); if (subscribeCount == 1) { ctx.writeAndFlush(Commands.newSuccess(subscribe.getRequestId())); // Trigger reconnect ctx.writeAndFlush(Commands.newCloseConsumer(subscribe.getConsumerId(), -1, null, 
null)); } else if (subscribeCount != 2) { // Respond to subsequent requests to prevent timeouts ctx.writeAndFlush(Commands.newSuccess(subscribe.getRequestId())); } // Don't respond to the second Subscribe command to ensure timeout }); mockBrokerService.setHandleCloseConsumer((ctx, closeConsumer) -> { closeConsumerCounter.incrementAndGet(); ctx.writeAndFlush(Commands.newSuccess(closeConsumer.getRequestId())); }); // Create consumer (subscribe) should succeed then upon closure, it should reattempt creation. The first // request will time out, which triggers CloseConsumer. The client might send the third Subscribe command before // the below assertion, so we pass with 2 or 3. client.newConsumer().topic(topic).subscriptionName("test").subscribe(); Awaitility.await().until(() -> closeConsumerCounter.get() == 1); Awaitility.await().until(() -> subscribeCounter.get() == 2 || subscribeCounter.get() == 3); mockBrokerService.resetHandleSubscribe(); mockBrokerService.resetHandleCloseConsumer(); } @Test public void testProducerFailDoesNotFailOtherProducer() throws Exception { producerFailDoesNotFailOtherProducer("persistent://prop/use/ns/t1", "persistent://prop/use/ns/t2"); } @Test public void testPartitionedProducerFailDoesNotFailOtherProducer() throws Exception { producerFailDoesNotFailOtherProducer("persistent://prop/use/ns/part-t1", "persistent://prop/use/ns/part-t2"); } private void producerFailDoesNotFailOtherProducer(String topic1, String topic2) throws Exception { @Cleanup PulsarClient client = PulsarClient.builder().serviceUrl(mockBrokerService.getBrokerAddress()).build(); final AtomicInteger counter = new AtomicInteger(0); mockBrokerService.setHandleProducer((ctx, producer) -> { if (counter.incrementAndGet() == 2) { // fail second producer ctx.writeAndFlush(Commands.newError(producer.getRequestId(), ServerError.AuthorizationError, "msg")); return; } ctx.writeAndFlush(Commands.newProducerSuccess(producer.getRequestId(), "default-producer", SchemaVersion.Empty)); }); 
ProducerBase<byte[]> producer1 = (ProducerBase<byte[]>) client.newProducer().topic(topic1).create(); ProducerBase<byte[]> producer2 = null; try { producer2 = (ProducerBase<byte[]>) client.newProducer().topic(topic2).create(); fail("Should have failed"); } catch (Exception e) { // ok } assertTrue(producer1.isConnected()); assertFalse(producer2 != null && producer2.isConnected()); mockBrokerService.resetHandleProducer(); } @Test public void testProducerContinuousRetryAfterSendFail() throws Exception { producerContinuousRetryAfterSendFail("persistent://prop/use/ns/t1"); } @Test public void testPartitionedProducerContinuousRetryAfterSendFail() throws Exception { producerContinuousRetryAfterSendFail("persistent://prop/use/ns/part-t1"); } private void producerContinuousRetryAfterSendFail(String topic) throws Exception { @Cleanup PulsarClient client = PulsarClient.builder().serviceUrl(mockBrokerService.getBrokerAddress()).build(); final AtomicInteger counter = new AtomicInteger(0); mockBrokerService.setHandleProducer((ctx, producer) -> { int i = counter.incrementAndGet(); if (i == 1 || i == 5) { // succeed on 1st and 5th attempts ctx.writeAndFlush(Commands.newProducerSuccess(producer.getRequestId(), "default-producer", SchemaVersion.Empty)); return; } ctx.writeAndFlush(Commands.newError(producer.getRequestId(), ServerError.PersistenceError, "msg")); }); final AtomicInteger msgCounter = new AtomicInteger(0); mockBrokerService.setHandleSend((ctx, send, headersAndPayload) -> { // fail send once, but succeed later if (msgCounter.incrementAndGet() == 1) { ctx.writeAndFlush(Commands.newSendError(0, 0, ServerError.PersistenceError, "Send Failed")); return; } ctx.writeAndFlush(Commands.newSendReceipt(0, 0, 0, 1, 1)); }); try { Producer<byte[]> producer = client.newProducer().topic(topic).create(); producer.send("message".getBytes()); } catch (Exception e) { fail("Should not fail"); } mockBrokerService.resetHandleProducer(); mockBrokerService.resetHandleSend(); } @Test public void 
testSubscribeFailWithoutRetry() throws Exception { subscribeFailWithoutRetry("persistent://prop/use/ns/t1"); } @Test public void testPartitionedSubscribeFailWithoutRetry() throws Exception { subscribeFailWithoutRetry("persistent://prop/use/ns/part-t1"); } @Test public void testLookupWithDisconnection() throws Exception { @Cleanup PulsarClient client = PulsarClient.builder().serviceUrl(mockBrokerService.getBrokerAddress()).build(); final AtomicInteger counter = new AtomicInteger(0); String topic = "persistent://prop/use/ns/t1"; mockBrokerService.setHandlePartitionLookup((ctx, lookup) -> { ctx.writeAndFlush(Commands.newPartitionMetadataResponse(0, lookup.getRequestId())); }); mockBrokerService.setHandleLookup((ctx, lookup) -> { if (counter.incrementAndGet() == 1) { // piggyback unknown error to relay assertion failure ctx.close(); return; } ctx.writeAndFlush( Commands.newLookupResponse(mockBrokerService.getBrokerAddress(), null, true, LookupType.Connect, lookup.getRequestId(), false)); }); try { client.newConsumer().topic(topic).subscriptionName("sub1").subscribe(); } catch (Exception e) { if (e.getMessage().equals(ASSERTION_ERROR)) { fail("Subscribe should not retry on persistence error"); } assertTrue(e instanceof PulsarClientException.BrokerPersistenceException); } mockBrokerService.resetHandlePartitionLookup(); mockBrokerService.resetHandleLookup(); } private void subscribeFailWithoutRetry(String topic) throws Exception { @Cleanup PulsarClient client = PulsarClient.builder().serviceUrl(mockBrokerService.getBrokerAddress()) .operationTimeout(1, TimeUnit.SECONDS).build(); final AtomicInteger counter = new AtomicInteger(0); mockBrokerService.setHandleSubscribe((ctx, subscribe) -> { if (counter.incrementAndGet() == 2) { // piggyback unknown error to relay assertion failure ctx.writeAndFlush( Commands.newError(subscribe.getRequestId(), ServerError.UnknownError, ASSERTION_ERROR)); return; } ctx.writeAndFlush(Commands.newError(subscribe.getRequestId(), 
ServerError.PersistenceError, "msg")); }); try { client.newConsumer().topic(topic).subscriptionName("sub1").subscribe(); } catch (Exception e) { if (e.getMessage().equals(ASSERTION_ERROR)) { fail("Subscribe should not retry on persistence error"); } assertTrue(e instanceof PulsarClientException.BrokerPersistenceException); } mockBrokerService.resetHandleSubscribe(); } @Test public void testSubscribeSuccessAfterRetry() throws Exception { subscribeSuccessAfterRetry("persistent://prop/use/ns/t1"); } @Test public void testPartitionedSubscribeSuccessAfterRetry() throws Exception { subscribeSuccessAfterRetry("persistent://prop/use/ns/part-t1"); } private void subscribeSuccessAfterRetry(String topic) throws Exception { @Cleanup PulsarClient client = PulsarClient.builder().serviceUrl(mockBrokerService.getBrokerAddress()).build(); final AtomicInteger counter = new AtomicInteger(0); mockBrokerService.setHandleSubscribe((ctx, subscribe) -> { if (counter.incrementAndGet() == 2) { ctx.writeAndFlush(Commands.newSuccess(subscribe.getRequestId())); return; } ctx.writeAndFlush(Commands.newError(subscribe.getRequestId(), ServerError.ServiceNotReady, "msg")); }); try { client.newConsumer().topic(topic).subscriptionName("sub1").subscribe(); } catch (Exception e) { fail("Should not fail"); } mockBrokerService.resetHandleSubscribe(); } @Test public void testSubscribeFailAfterRetryTimeout() throws Exception { subscribeFailAfterRetryTimeout("persistent://prop/use/ns/t1"); } @Test public void testPartitionedSubscribeFailAfterRetryTimeout() throws Exception { subscribeFailAfterRetryTimeout("persistent://prop/use/ns/part-t1"); } private void subscribeFailAfterRetryTimeout(String topic) throws Exception { @Cleanup PulsarClient client = PulsarClient.builder().serviceUrl(mockBrokerService.getBrokerAddress()) .operationTimeout(200, TimeUnit.MILLISECONDS).build(); final AtomicInteger counter = new AtomicInteger(0); mockBrokerService.setHandleSubscribe((ctx, subscribe) -> { if 
(counter.incrementAndGet() == 2) { try { Thread.sleep(500); } catch (InterruptedException e) { // do nothing } } ctx.writeAndFlush(Commands.newError(subscribe.getRequestId(), ServerError.ServiceNotReady, "msg")); }); try { client.newConsumer().topic(topic).subscriptionName("sub1").subscribe(); fail("Should have failed"); } catch (Exception e) { // we fail even on the retriable error assertTrue(e instanceof PulsarClientException); } mockBrokerService.resetHandleSubscribe(); } @Test public void testSubscribeFailDoesNotFailOtherConsumer() throws Exception { subscribeFailDoesNotFailOtherConsumer("persistent://prop/use/ns/t1", "persistent://prop/use/ns/t2"); } @Test public void testPartitionedSubscribeFailDoesNotFailOtherConsumer() throws Exception { subscribeFailDoesNotFailOtherConsumer("persistent://prop/use/ns/part-t1", "persistent://prop/use/ns/part-t2"); } private void subscribeFailDoesNotFailOtherConsumer(String topic1, String topic2) throws Exception { @Cleanup PulsarClient client = PulsarClient.builder().serviceUrl(mockBrokerService.getBrokerAddress()).build(); final AtomicInteger counter = new AtomicInteger(0); mockBrokerService.setHandleSubscribe((ctx, subscribe) -> { if (counter.incrementAndGet() == 2) { // fail second producer ctx.writeAndFlush(Commands.newError(subscribe.getRequestId(), ServerError.AuthorizationError, "msg")); return; } ctx.writeAndFlush(Commands.newSuccess(subscribe.getRequestId())); }); ConsumerBase<byte[]> consumer1 = (ConsumerBase<byte[]>) client.newConsumer().topic(topic1) .subscriptionName("sub1").subscribe(); ConsumerBase<byte[]> consumer2 = null; try { consumer2 = (ConsumerBase<byte[]>) client.newConsumer().topic(topic2).subscriptionName("sub1").subscribe(); fail("Should have failed"); } catch (Exception e) { // ok } assertTrue(consumer1.isConnected()); assertFalse(consumer2 != null && consumer2.isConnected()); mockBrokerService.resetHandleSubscribe(); } // failed to connect to partition at initialization step if a producer which 
connects to broker as lazy-loading mode @Test public void testPartitionedProducerFailOnInitialization() throws Throwable { @Cleanup PulsarClient client = PulsarClient.builder().serviceUrl(mockBrokerService.getHttpAddress()).build(); final AtomicInteger producerCounter = new AtomicInteger(0); mockBrokerService.setHandleProducer((ctx, producer) -> { if (producerCounter.incrementAndGet() == 1) { ctx.writeAndFlush(Commands.newError(producer.getRequestId(), ServerError.AuthorizationError, "msg")); return; } ctx.writeAndFlush(Commands.newProducerSuccess(producer.getRequestId(), "default-producer", SchemaVersion.Empty)); }); try { client.newProducer() .enableLazyStartPartitionedProducers(true) .accessMode(ProducerAccessMode.Shared) .topic("persistent://prop/use/ns/multi-part-t1").create(); fail("Should have failed with an authorization error"); } catch (Exception e) { assertTrue(e instanceof PulsarClientException.AuthorizationException); } assertEquals(producerCounter.get(), 1); mockBrokerService.resetHandleProducer(); mockBrokerService.resetHandleCloseProducer(); } // failed to connect to partition at sending step if a producer which connects to broker as lazy-loading mode @Test public void testPartitionedProducerFailOnSending() throws Throwable { @Cleanup PulsarClient client = PulsarClient.builder().serviceUrl(mockBrokerService.getHttpAddress()).build(); final AtomicInteger producerCounter = new AtomicInteger(0); final AtomicInteger closeCounter = new AtomicInteger(0); final String topicName = "persistent://prop/use/ns/multi-part-t1"; mockBrokerService.setHandleProducer((ctx, producer) -> { if (producerCounter.incrementAndGet() == 2) { ctx.writeAndFlush(Commands.newError(producer.getRequestId(), ServerError.AuthorizationError, "msg")); return; } ctx.writeAndFlush(Commands.newProducerSuccess(producer.getRequestId(), "default-producer", SchemaVersion.Empty)); }); mockBrokerService.setHandleSend((ctx, send, headersAndPayload) -> 
        // (tail of the preceding lazy-partitioned-producer test, whose head is above this
        // fragment) acknowledge every Send command with a receipt.
        ctx.writeAndFlush(Commands.newSendReceipt(send.getProducerId(), send.getSequenceId(),
                send.getHighestSequenceId(), 0L, 0L)));

        // Count CloseProducer commands issued by the client.
        mockBrokerService.setHandleCloseProducer((ctx, closeProducer) -> {
            ctx.writeAndFlush(Commands.newSuccess(closeProducer.getRequestId()));
            closeCounter.incrementAndGet();
        });

        final PartitionedProducerImpl<byte[]> producer = (PartitionedProducerImpl<byte[]>) client.newProducer()
                .enableLazyStartPartitionedProducers(true)
                .accessMode(ProducerAccessMode.Shared)
                .topic(topicName)
                .enableBatching(false)
                .messageRoutingMode(MessageRoutingMode.RoundRobinPartition)
                .create();

        try {
            producer.send("msg".getBytes());
            fail("Should have failed with an not connected exception");
        } catch (Exception e) {
            // Only the partition touched by the failed send has a lazily created
            // internal producer at this point.
            assertTrue(e instanceof PulsarClientException.NotConnectedException);
            assertEquals(producer.getProducers().size(), 1);
        }

        try {
            // recreate failed producer
            for (int i = 0; i < client.getPartitionsForTopic(topicName).get().size(); i++) {
                producer.send("msg".getBytes());
            }
            assertEquals(producer.getProducers().size(), client.getPartitionsForTopic(topicName).get().size());
            // 5 CreateProducer attempts in total — presumably the initial failed attempt plus
            // one per partition on retry; TODO(review) confirm against the mock setup above
            // this fragment.
            assertEquals(producerCounter.get(), 5);
        } catch (Exception e) {
            fail();
        }

        // should not call close
        assertEquals(closeCounter.get(), 0);

        mockBrokerService.resetHandleProducer();
        mockBrokerService.resetHandleCloseProducer();
    }

    // if a producer which doesn't connect as lazy-loading mode fails to connect while creating partitioned producer,
    // it should close all successful connections of other producers and fail
    @Test
    public void testOneProducerFailShouldCloseAllProducersInPartitionedProducer() throws Exception {
        @Cleanup
        PulsarClient client = PulsarClient.builder().serviceUrl(mockBrokerService.getHttpAddress()).build();
        final AtomicInteger producerCounter = new AtomicInteger(0);
        final AtomicInteger closeCounter = new AtomicInteger(0);

        // Fail the 3rd CreateProducer with an authorization error; succeed otherwise.
        mockBrokerService.setHandleProducer((ctx, producer) -> {
            if (producerCounter.incrementAndGet() == 3) {
                ctx.writeAndFlush(Commands.newError(producer.getRequestId(), ServerError.AuthorizationError, "msg"));
                return;
            }
            ctx.writeAndFlush(Commands.newProducerSuccess(producer.getRequestId(), "default-producer",
                    SchemaVersion.Empty));
        });

        // Count CloseProducer commands issued while unwinding the partial creation.
        mockBrokerService.setHandleCloseProducer((ctx, closeProducer) -> {
            ctx.writeAndFlush(Commands.newSuccess(closeProducer.getRequestId()));
            closeCounter.incrementAndGet();
        });

        try {
            client.newProducer().topic("persistent://prop/use/ns/multi-part-t1").create();
            fail("Should have failed with an authorization error");
        } catch (Exception e) {
            assertTrue(e instanceof PulsarClientException.AuthorizationException);
            // should call close for 3 partitions
            assertEquals(closeCounter.get(), 3);
        }

        mockBrokerService.resetHandleProducer();
        mockBrokerService.resetHandleCloseProducer();
    }

    // if a consumer fails to subscribe while creating partitioned consumer, it should close all successful connections
    // of other consumers and fail
    @Test
    public void testOneConsumerFailShouldCloseAllConsumersInPartitionedConsumer() throws Exception {
        @Cleanup
        PulsarClient client = PulsarClient.builder().serviceUrl(mockBrokerService.getHttpAddress()).build();
        final AtomicInteger subscribeCounter = new AtomicInteger(0);
        final AtomicInteger closeCounter = new AtomicInteger(0);

        // Fail the 3rd Subscribe with an authorization error; succeed otherwise.
        mockBrokerService.setHandleSubscribe((ctx, subscribe) -> {
            System.err.println("subscribeCounter: " + subscribeCounter.get());
            if (subscribeCounter.incrementAndGet() == 3) {
                ctx.writeAndFlush(Commands.newError(subscribe.getRequestId(), ServerError.AuthorizationError, "msg"));
                return;
            }
            ctx.writeAndFlush(Commands.newSuccess(subscribe.getRequestId()));
        });

        mockBrokerService.setHandleCloseConsumer((ctx, closeConsumer) -> {
            ctx.writeAndFlush(Commands.newSuccess(closeConsumer.getRequestId()));
            closeCounter.incrementAndGet();
        });

        try {
            client.newConsumer().topic("persistent://prop/use/ns/multi-part-t1").subscriptionName("sub1").subscribe();
            fail("Should have failed with an authorization error");
        } catch (PulsarClientException.AuthorizationException e) {
            // expected
        }

        // should call close for 3 partitions
        assertEquals(closeCounter.get(), 3);

        mockBrokerService.resetHandleSubscribe();
        mockBrokerService.resetHandleCloseConsumer();
    }

    @Test
    public void testFlowSendWhenPartitionedSubscribeCompletes() throws Exception {
        @Cleanup
        PulsarClient client = PulsarClient.builder().serviceUrl(mockBrokerService.getHttpAddress()).build();

        AtomicInteger subscribed = new AtomicInteger();
        AtomicBoolean fail = new AtomicBoolean(false);

        mockBrokerService.setHandleSubscribe((ctx, subscribe) -> {
            subscribed.incrementAndGet();
            ctx.writeAndFlush(Commands.newSuccess(subscribe.getRequestId()));
        });

        // Any Flow arriving before all partitions subscribed marks the test failed.
        // NOTE(review): assumes the mock broker reports 4 partitions for this topic — confirm.
        mockBrokerService.setHandleFlow((ctx, sendFlow) -> {
            if (subscribed.get() != 4) {
                fail.set(true);
            }
        });

        client.newConsumer().topic("persistent://prop/use/ns/multi-part-t1").subscriptionName("sub1").subscribe();

        if (fail.get()) {
            fail("Flow command should have been sent after all 4 partitions subscribe successfully");
        }

        mockBrokerService.resetHandleSubscribe();
        mockBrokerService.resetHandleFlow();
    }

    // Run this test multiple times to reproduce race conditions on reconnection logic
    @Test(invocationCount = 10, groups = "broker-api")
    public void testProducerReconnect() throws Exception {
        AtomicInteger numOfConnections = new AtomicInteger();
        AtomicReference<ChannelHandlerContext> channelCtx = new AtomicReference<>();
        AtomicBoolean msgSent = new AtomicBoolean();
        mockBrokerService.setHandleConnect((ctx, connect) -> {
            channelCtx.set(ctx);
            ctx.writeAndFlush(Commands.newConnected(connect.getProtocolVersion(), false));
            if (numOfConnections.incrementAndGet() == 2) {
                // close the cnx immediately when trying to connect the 2nd time
                ctx.channel().close();
            }
        });
        mockBrokerService.setHandleProducer((ctx, produce) -> {
            ctx.writeAndFlush(Commands.newProducerSuccess(produce.getRequestId(), "default-producer",
                    SchemaVersion.Empty));
        });
        mockBrokerService.setHandleSend((ctx, sendCmd, headersAndPayload) -> {
            msgSent.set(true);
            ctx.writeAndFlush(Commands.newSendReceipt(0, 0, 0, 1, 1));
        });
        @Cleanup
        PulsarClient client = PulsarClient.builder().serviceUrl(mockBrokerService.getBrokerAddress()).build();
        Producer<byte[]> producer = client.newProducer().topic("persistent://prop/use/ns/t1").create();

        // close the cnx after creating the producer
        channelCtx.get().channel().close().get();

        producer.send(new byte[0]);

        // The send must succeed through a fresh connection: at least 3 connects happened
        // (initial, the rejected 2nd attempt, and the successful reconnect).
        assertTrue(msgSent.get());
        assertTrue(numOfConnections.get() >= 3);

        mockBrokerService.resetHandleConnect();
        mockBrokerService.resetHandleProducer();
        mockBrokerService.resetHandleSend();
    }

    @Test
    public void testConsumerReconnect() throws Exception {
        AtomicInteger numOfConnections = new AtomicInteger();
        AtomicReference<ChannelHandlerContext> channelCtx = new AtomicReference<>();
        CountDownLatch latch = new CountDownLatch(1);
        mockBrokerService.setHandleConnect((ctx, connect) -> {
            channelCtx.set(ctx);
            ctx.writeAndFlush(Commands.newConnected(connect.getProtocolVersion(), false));
            if (numOfConnections.incrementAndGet() == 2) {
                // close the cnx immediately when trying to connect the 2nd time
                ctx.channel().close();
            }
            if (numOfConnections.get() == 3) {
                latch.countDown();
            }
        });
        mockBrokerService.setHandleSubscribe((ctx, subscribe) -> {
            ctx.writeAndFlush(Commands.newSuccess(subscribe.getRequestId()));
        });

        @Cleanup
        PulsarClient client = PulsarClient.builder().serviceUrl(mockBrokerService.getBrokerAddress()).build();
        client.newConsumer().topic("persistent://prop/use/ns/t1").subscriptionName("sub1").subscribe();

        // close the cnx after creating the consumer, forcing a reconnect
        channelCtx.get().channel().close();
        latch.await(5, TimeUnit.SECONDS);
        assertEquals(numOfConnections.get(), 3);

        mockBrokerService.resetHandleConnect();
        mockBrokerService.resetHandleSubscribe();
    }

    @Test
    public void testCommandErrorMessageIsNull() throws Exception {
        @Cleanup
        PulsarClient client = PulsarClient.builder().serviceUrl(mockBrokerService.getBrokerAddress()).build();
        // Reply to CreateProducer with an Error command carrying a null message.
        mockBrokerService.setHandleProducer((ctx, producer) -> {
            try {
                ctx.writeAndFlush(Commands.newError(producer.getRequestId(), ServerError.AuthorizationError, null));
            } catch (Exception e) {
                fail("Send error command failed", e);
            }
        });
        try {
            client.newProducer().topic("persistent://prop/use/ns/t1").create();
            fail();
        } catch (Exception e) {
            // A null broker error message must surface as an empty errorMsg, not "null".
            assertTrue(e instanceof PulsarClientException.AuthorizationException);
            Map<String, String> map = JsonUtil.fromJson(e.getMessage(), Map.class);
            assertEquals(map.get("errorMsg"), "");
        }
        mockBrokerService.resetHandleProducer();
    }
}
// ==== Concatenated-content boundary (dataset metadata preserved as a comment) ====
// repo: google/closure-compiler, size: 36,276 bytes
// path: test/com/google/javascript/jscomp/JSChunkGraphTest.java
/* * Copyright 2008 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import static java.util.Collections.shuffle; import static org.junit.Assert.assertThrows; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.gson.JsonArray; import com.google.gson.JsonObject; import com.google.javascript.jscomp.deps.DependencyInfo.Require; import com.google.javascript.rhino.StaticSourceFile.SourceKind; import java.util.ArrayList; import java.util.Arrays; import java.util.BitSet; import java.util.HashMap; import java.util.List; import java.util.stream.Collectors; import org.jspecify.annotations.Nullable; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Tests for {@link JSChunkGraph} */ @RunWith(JUnit4.class) public final class JSChunkGraphTest { private JSChunk chunkA; private JSChunk chunkB; private JSChunk chunkC; private JSChunk chunkD; private JSChunk chunkE; private JSChunk chunkF; private @Nullable JSChunkGraph graph = null; // For resolving dependencies only. 
private Compiler compiler; @Before public void setUp() throws Exception { compiler = new Compiler(); } private void makeDeps() { chunkA = new JSChunk("chunkA"); chunkB = new JSChunk("chunkB"); chunkC = new JSChunk("chunkC"); chunkD = new JSChunk("chunkD"); chunkE = new JSChunk("chunkE"); chunkF = new JSChunk("chunkF"); chunkB.addDependency(chunkA); // __A__ chunkC.addDependency(chunkA); // / | \ chunkD.addDependency(chunkB); // B C | chunkE.addDependency(chunkB); // / \ /| | chunkE.addDependency(chunkC); // D E | / chunkF.addDependency(chunkA); // \|/ chunkF.addDependency(chunkC); // F chunkF.addDependency(chunkE); } private void makeGraph() { graph = new JSChunkGraph(new JSChunk[] {chunkA, chunkB, chunkC, chunkD, chunkE, chunkF}); } private JSChunk getWeakModule() { return graph.getChunkByName(JSChunk.WEAK_CHUNK_NAME); } @Test public void testMakesWeakModuleIfNotPassed() { makeDeps(); makeGraph(); assertThat(graph.getChunkCount()).isEqualTo(7); assertThat(graph.getChunksByName()).containsKey(JSChunk.WEAK_CHUNK_NAME); assertThat(getWeakModule().getAllDependencies()) .containsExactly(chunkA, chunkB, chunkC, chunkD, chunkE, chunkF); } @Test public void testAcceptExistingWeakModule() { makeDeps(); JSChunk weakChunk = new JSChunk(JSChunk.WEAK_CHUNK_NAME); weakChunk.addDependency(chunkA); weakChunk.addDependency(chunkB); weakChunk.addDependency(chunkC); weakChunk.addDependency(chunkD); weakChunk.addDependency(chunkE); weakChunk.addDependency(chunkF); weakChunk.add(SourceFile.fromCode("weak", "", SourceKind.WEAK)); JSChunkGraph graph = new JSChunkGraph(new JSChunk[] {chunkA, chunkB, chunkC, chunkD, chunkE, chunkF, weakChunk}); assertThat(graph.getChunkCount()).isEqualTo(7); assertThat(graph.getChunkByName(JSChunk.WEAK_CHUNK_NAME)).isSameInstanceAs(weakChunk); } @Test public void testExistingWeakModuleMustHaveDependenciesOnAllOtherModules() { makeDeps(); JSChunk weakChunk = new JSChunk(JSChunk.WEAK_CHUNK_NAME); weakChunk.addDependency(chunkA); 
weakChunk.addDependency(chunkB); weakChunk.addDependency(chunkC); weakChunk.addDependency(chunkD); weakChunk.addDependency(chunkE); // Missing F IllegalStateException e = assertThrows( IllegalStateException.class, () -> new JSChunkGraph( new JSChunk[] {chunkA, chunkB, chunkC, chunkD, chunkE, chunkF, weakChunk})); assertThat(e) .hasMessageThat() .isEqualTo("A weak chunk already exists but it does not depend on every other chunk."); } @Test public void testWeakFileCannotExistOutsideWeakModule() { makeDeps(); JSChunk weakChunk = new JSChunk(JSChunk.WEAK_CHUNK_NAME); weakChunk.addDependency(chunkA); weakChunk.addDependency(chunkB); weakChunk.addDependency(chunkC); weakChunk.addDependency(chunkD); weakChunk.addDependency(chunkE); weakChunk.addDependency(chunkF); chunkA.add(SourceFile.fromCode("a", "", SourceKind.WEAK)); IllegalStateException e = assertThrows( IllegalStateException.class, () -> new JSChunkGraph( new JSChunk[] {chunkA, chunkB, chunkC, chunkD, chunkE, chunkF, weakChunk})); assertThat(e) .hasMessageThat() .contains("Found these weak sources in other chunks:\n a (in chunk chunkA)"); } @Test public void testStrongFileCannotExistInWeakModule() { makeDeps(); JSChunk weakChunk = new JSChunk(JSChunk.WEAK_CHUNK_NAME); weakChunk.addDependency(chunkA); weakChunk.addDependency(chunkB); weakChunk.addDependency(chunkC); weakChunk.addDependency(chunkD); weakChunk.addDependency(chunkE); weakChunk.addDependency(chunkF); weakChunk.add(SourceFile.fromCode("a", "", SourceKind.STRONG)); IllegalStateException e = assertThrows( IllegalStateException.class, () -> new JSChunkGraph( new JSChunk[] {chunkA, chunkB, chunkC, chunkD, chunkE, chunkF, weakChunk})); ; assertThat(e).hasMessageThat().contains("Found these strong sources in the weak chunk:\n a"); } @Test public void testSmallerTreeBeatsDeeperTree() { final JSChunk a = new JSChunk("a"); final JSChunk b = new JSChunk("b"); final JSChunk c = new JSChunk("c"); final JSChunk d = new JSChunk("d"); final JSChunk e = new 
JSChunk("e"); final JSChunk f = new JSChunk("f"); final JSChunk g = new JSChunk("g"); final JSChunk h = new JSChunk("h"); // a // / \ // b c b.addDependency(a); c.addDependency(a); // b // /|\ // e f g e.addDependency(b); f.addDependency(b); g.addDependency(b); // c // | // d // // \\ // / | | \ // e f g h d.addDependency(c); e.addDependency(d); f.addDependency(d); g.addDependency(d); h.addDependency(d); JSChunkGraph graph = new JSChunkGraph(new JSChunk[] {a, b, c, d, e, f, g, h}); // d is deeper, but it also has an extra dependent node, so b is the better choice. assertSmallestCoveringSubtree(b, graph, a, e, f, g); // However, if the parent tree we're looking at is c, then b isn't an option assertSmallestCoveringSubtree(d, graph, c, e, f, g); } @Test public void testModuleDepth() { makeDeps(); makeGraph(); assertWithMessage("chunkA should have depth 0").that(chunkA.getDepth()).isEqualTo(0); assertWithMessage("chunkB should have depth 1").that(chunkB.getDepth()).isEqualTo(1); assertWithMessage("chunkC should have depth 1").that(chunkC.getDepth()).isEqualTo(1); assertWithMessage("chunkD should have depth 2").that(chunkD.getDepth()).isEqualTo(2); assertWithMessage("chunkE should have depth 2").that(chunkE.getDepth()).isEqualTo(2); assertWithMessage("chunkF should have depth 3").that(chunkF.getDepth()).isEqualTo(3); } @Test public void testDeepestCommonDep() { makeDeps(); makeGraph(); assertDeepestCommonDep(null, chunkA, chunkA); assertDeepestCommonDep(null, chunkA, chunkB); assertDeepestCommonDep(null, chunkA, chunkC); assertDeepestCommonDep(null, chunkA, chunkD); assertDeepestCommonDep(null, chunkA, chunkE); assertDeepestCommonDep(null, chunkA, chunkF); assertDeepestCommonDep(chunkA, chunkB, chunkB); assertDeepestCommonDep(chunkA, chunkB, chunkC); assertDeepestCommonDep(chunkA, chunkB, chunkD); assertDeepestCommonDep(chunkA, chunkB, chunkE); assertDeepestCommonDep(chunkA, chunkB, chunkF); assertDeepestCommonDep(chunkA, chunkC, chunkC); assertDeepestCommonDep(chunkA, 
chunkC, chunkD); assertDeepestCommonDep(chunkA, chunkC, chunkE); assertDeepestCommonDep(chunkA, chunkC, chunkF); assertDeepestCommonDep(chunkB, chunkD, chunkD); assertDeepestCommonDep(chunkB, chunkD, chunkE); assertDeepestCommonDep(chunkB, chunkD, chunkF); assertDeepestCommonDep(chunkC, chunkE, chunkE); assertDeepestCommonDep(chunkC, chunkE, chunkF); assertDeepestCommonDep(chunkE, chunkF, chunkF); } @Test public void testDeepestCommonDepInclusive() { makeDeps(); makeGraph(); assertDeepestCommonDepInclusive(chunkA, chunkA, chunkA); assertDeepestCommonDepInclusive(chunkA, chunkA, chunkB); assertDeepestCommonDepInclusive(chunkA, chunkA, chunkC); assertDeepestCommonDepInclusive(chunkA, chunkA, chunkD); assertDeepestCommonDepInclusive(chunkA, chunkA, chunkE); assertDeepestCommonDepInclusive(chunkA, chunkA, chunkF); assertDeepestCommonDepInclusive(chunkB, chunkB, chunkB); assertDeepestCommonDepInclusive(chunkA, chunkB, chunkC); assertDeepestCommonDepInclusive(chunkB, chunkB, chunkD); assertDeepestCommonDepInclusive(chunkB, chunkB, chunkE); assertDeepestCommonDepInclusive(chunkB, chunkB, chunkF); assertDeepestCommonDepInclusive(chunkC, chunkC, chunkC); assertDeepestCommonDepInclusive(chunkA, chunkC, chunkD); assertDeepestCommonDepInclusive(chunkC, chunkC, chunkE); assertDeepestCommonDepInclusive(chunkC, chunkC, chunkF); assertDeepestCommonDepInclusive(chunkD, chunkD, chunkD); assertDeepestCommonDepInclusive(chunkB, chunkD, chunkE); assertDeepestCommonDepInclusive(chunkB, chunkD, chunkF); assertDeepestCommonDepInclusive(chunkE, chunkE, chunkE); assertDeepestCommonDepInclusive(chunkE, chunkE, chunkF); assertDeepestCommonDepInclusive(chunkF, chunkF, chunkF); } @Test public void testSmallestCoveringSubtree() { makeDeps(); makeGraph(); assertSmallestCoveringSubtree(chunkA, chunkA, chunkA, chunkA); assertSmallestCoveringSubtree(chunkA, chunkA, chunkA, chunkB); assertSmallestCoveringSubtree(chunkA, chunkA, chunkA, chunkC); assertSmallestCoveringSubtree(chunkA, chunkA, chunkA, 
chunkD); assertSmallestCoveringSubtree(chunkA, chunkA, chunkA, chunkE); assertSmallestCoveringSubtree(chunkA, chunkA, chunkA, chunkF); assertSmallestCoveringSubtree(chunkB, chunkA, chunkB, chunkB); assertSmallestCoveringSubtree(chunkA, chunkA, chunkB, chunkC); assertSmallestCoveringSubtree(chunkB, chunkA, chunkB, chunkD); assertSmallestCoveringSubtree(chunkB, chunkA, chunkB, chunkE); assertSmallestCoveringSubtree(chunkB, chunkA, chunkB, chunkF); assertSmallestCoveringSubtree(chunkC, chunkA, chunkC, chunkC); assertSmallestCoveringSubtree(chunkA, chunkA, chunkC, chunkD); assertSmallestCoveringSubtree(chunkC, chunkA, chunkC, chunkE); assertSmallestCoveringSubtree(chunkC, chunkA, chunkC, chunkF); assertSmallestCoveringSubtree(chunkD, chunkA, chunkD, chunkD); assertSmallestCoveringSubtree(chunkB, chunkA, chunkD, chunkE); assertSmallestCoveringSubtree(chunkB, chunkA, chunkD, chunkF); assertSmallestCoveringSubtree(chunkE, chunkA, chunkE, chunkE); assertSmallestCoveringSubtree(chunkE, chunkA, chunkE, chunkF); assertSmallestCoveringSubtree(chunkF, chunkA, chunkF, chunkF); } @Test public void testGetTransitiveDepsDeepestFirst() { makeDeps(); makeGraph(); assertTransitiveDepsDeepestFirst(chunkA); assertTransitiveDepsDeepestFirst(chunkB, chunkA); assertTransitiveDepsDeepestFirst(chunkC, chunkA); assertTransitiveDepsDeepestFirst(chunkD, chunkB, chunkA); assertTransitiveDepsDeepestFirst(chunkE, chunkC, chunkB, chunkA); assertTransitiveDepsDeepestFirst(chunkF, chunkE, chunkC, chunkB, chunkA); } @Test public void testManageDependenciesLooseWithoutEntryPoint() throws Exception { makeDeps(); makeGraph(); setUpManageDependenciesTest(); DependencyOptions depOptions = DependencyOptions.pruneLegacyForEntryPoints(ImmutableList.of()); ImmutableList<CompilerInput> results = graph.manageDependencies(compiler, depOptions); assertInputs(chunkA, "a1", "a3"); assertInputs(chunkB, "a2", "b2"); assertInputs(chunkC); // no inputs assertInputs(chunkE, "c1", "e1", "e2"); 
assertThat(sourceNames(results)) .isEqualTo(ImmutableList.of("a1", "a3", "a2", "b2", "c1", "e1", "e2")); } @Test public void testManageDependenciesLooseWithEntryPoint() throws Exception { makeDeps(); makeGraph(); setUpManageDependenciesTest(); DependencyOptions depOptions = DependencyOptions.pruneLegacyForEntryPoints( ImmutableList.of(ModuleIdentifier.forClosure("c2"))); ImmutableList<CompilerInput> results = graph.manageDependencies(compiler, depOptions); assertInputs(chunkA, "a1", "a3"); assertInputs(chunkB, "a2", "b2"); assertInputs(chunkC, "c1", "c2"); assertInputs(chunkE, "e1", "e2"); assertThat(sourceNames(results)) .isEqualTo(ImmutableList.of("a1", "a3", "a2", "b2", "c1", "c2", "e1", "e2")); } @Test public void testManageDependenciesStrictWithEntryPoint() throws Exception { makeDeps(); makeGraph(); setUpManageDependenciesTest(); DependencyOptions depOptions = DependencyOptions.pruneForEntryPoints(ImmutableList.of(ModuleIdentifier.forClosure("c2"))); ImmutableList<CompilerInput> results = graph.manageDependencies(compiler, depOptions); // Everything gets pushed up into module c, because that's // the only one that has entry points. 
assertInputs(chunkA); assertInputs(chunkB); assertInputs(chunkC, "a1", "c1", "c2"); assertInputs(chunkE); assertThat(sourceNames(results)).containsExactly("a1", "c1", "c2").inOrder(); } @Test public void testManageDependenciesStrictForGoogRequireDynamic() throws Exception { JSChunk chunkA = new JSChunk("chunk"); graph = new JSChunkGraph(new JSChunk[] {chunkA}); List<CompilerInput> inputs = new ArrayList<>(); CompilerInput compilerInputA1 = new CompilerInput(code("a1", provides("a1"), requires())); compilerInputA1.addRequireDynamicImports("a2"); chunkA.add(compilerInputA1); chunkA.add(code("a2", provides("a2"), requires())); inputs.addAll(chunkA.getInputs()); for (CompilerInput input : inputs) { input.setCompiler(compiler); } DependencyOptions depOptions = DependencyOptions.pruneForEntryPoints(ImmutableList.of(ModuleIdentifier.forClosure("a1"))); ImmutableList<CompilerInput> results = graph.manageDependencies(compiler, depOptions); assertInputs(chunkA, "a1", "a2"); assertThat(sourceNames(results)).containsExactly("a1", "a2"); } @Test public void testManageDependenciesStrictWithEntryPointWithDuplicates() throws Exception { final JSChunk a = new JSChunk("a"); JSChunkGraph graph = new JSChunkGraph(new JSChunk[] {a}); // Create all the input files. List<CompilerInput> inputs = new ArrayList<>(); a.add(code("a1", provides("a1"), requires("a2"))); a.add(code("a2", provides("a2"), requires())); a.add(code("a3", provides("a2"), requires())); inputs.addAll(a.getInputs()); for (CompilerInput input : inputs) { input.setCompiler(compiler); } DependencyOptions depOptions = DependencyOptions.pruneForEntryPoints(ImmutableList.of(ModuleIdentifier.forClosure("a1"))); ImmutableList<CompilerInput> results = graph.manageDependencies(compiler, depOptions); // Everything gets pushed up into module c, because that's // the only one that has entry points. 
assertInputs(a, "a2", "a3", "a1"); assertThat(sourceNames(results)).containsExactly("a2", "a3", "a1").inOrder(); } @Test public void testManageDependenciesSortOnly() throws Exception { makeDeps(); makeGraph(); setUpManageDependenciesTest(); ImmutableList<CompilerInput> results = graph.manageDependencies(compiler, DependencyOptions.sortOnly()); assertInputs(chunkA, "a1", "a2", "a3"); assertInputs(chunkB, "b1", "b2"); assertInputs(chunkC, "c1", "c2"); assertInputs(chunkE, "e1", "e2"); assertThat(sourceNames(results)) .isEqualTo(ImmutableList.of("a1", "a2", "a3", "b1", "b2", "c1", "c2", "e1", "e2")); } // NOTE: The newline between the @provideGoog comment and the var statement is required. private static final String BASEJS = """ /** @fileoverview * @provideGoog */ var COMPILED = false; var goog = goog || {} """; @Test public void testManageDependenciesSortOnlyImpl() throws Exception { makeDeps(); makeGraph(); chunkA.add(code("a2", provides("a2"), requires("a1"))); chunkA.add(code("a1", provides("a1"), requires())); chunkA.add(code("base.js", BASEJS, provides(), requires())); for (CompilerInput input : chunkA.getInputs()) { input.setCompiler(compiler); } ImmutableList<CompilerInput> results = graph.manageDependencies(compiler, DependencyOptions.sortOnly()); assertInputs(chunkA, "base.js", "a1", "a2"); assertThat(sourceNames(results)).containsExactly("base.js", "a1", "a2").inOrder(); } @Test public void testNoFiles() throws Exception { makeDeps(); makeGraph(); ImmutableList<CompilerInput> results = graph.manageDependencies(compiler, DependencyOptions.sortOnly()); assertThat(results).isEmpty(); } @Test public void testToJson() { makeDeps(); makeGraph(); JsonArray modules = graph.toJson(); assertThat(modules).hasSize(7); for (int i = 0; i < modules.size(); i++) { JsonObject m = modules.get(i).getAsJsonObject(); assertThat(m.get("name")).isNotNull(); assertThat(m.get("dependencies")).isNotNull(); assertThat(m.get("transitive-dependencies")).isNotNull(); 
assertThat(m.get("inputs")).isNotNull(); } JsonObject m = modules.get(3).getAsJsonObject(); assertThat(m.get("name").getAsString()).isEqualTo("chunkD"); assertThat(m.get("dependencies").getAsJsonArray().toString()).isEqualTo("[\"chunkB\"]"); assertThat(m.get("transitive-dependencies").getAsJsonArray()).hasSize(2); assertThat(m.get("inputs").getAsJsonArray().toString()).isEqualTo("[]"); } private List<CompilerInput> setUpManageDependenciesTest() { List<CompilerInput> inputs = new ArrayList<>(); chunkA.add(code("a1", provides("a1"), requires())); chunkA.add(code("a2", provides("a2"), requires("a1"))); chunkA.add(code("a3", provides(), requires("a1"))); chunkB.add(code("b1", provides("b1"), requires("a2"))); chunkB.add(code("b2", provides(), requires("a1", "a2"))); chunkC.add(code("c1", provides("c1"), requires("a1"))); chunkC.add(code("c2", provides("c2"), requires("c1"))); chunkE.add(code("e1", provides(), requires("c1"))); chunkE.add(code("e2", provides(), requires("c1"))); inputs.addAll(chunkA.getInputs()); inputs.addAll(chunkB.getInputs()); inputs.addAll(chunkC.getInputs()); inputs.addAll(chunkE.getInputs()); for (CompilerInput input : inputs) { input.setCompiler(compiler); } return inputs; } @Test public void testGoogBaseOrderedCorrectly() throws Exception { makeDeps(); makeGraph(); List<SourceFile> sourceFiles = new ArrayList<>(); sourceFiles.add(code("a9", provides("a9"), requires())); sourceFiles.add(code("a8", provides("a8"), requires())); sourceFiles.add(code("a7", provides("a7"), requires())); sourceFiles.add(code("a6", provides("a6"), requires())); sourceFiles.add(code("a5", provides("a5"), requires())); sourceFiles.add(code("a4", provides("a4"), requires())); sourceFiles.add(code("a3", provides("a3"), requires())); sourceFiles.add(code("a2", provides("a2"), requires())); sourceFiles.add( code("a1", provides("a1"), requires("a2", "a3", "a4", "a5", "a6", "a7", "a8", "a9"))); sourceFiles.add(code("base.js", BASEJS, provides(), requires())); 
DependencyOptions depOptions = DependencyOptions.pruneForEntryPoints(ImmutableList.of(ModuleIdentifier.forClosure("a1"))); for (int i = 0; i < 10; i++) { shuffle(sourceFiles); chunkA.removeAll(); for (SourceFile sourceFile : sourceFiles) { chunkA.add(sourceFile); } for (CompilerInput input : chunkA.getInputs()) { input.setCompiler(compiler); } ImmutableList<CompilerInput> results = graph.manageDependencies(compiler, depOptions); assertInputs(chunkA, "base.js", "a2", "a3", "a4", "a5", "a6", "a7", "a8", "a9", "a1"); assertThat(sourceNames(results)) .containsExactly("base.js", "a2", "a3", "a4", "a5", "a6", "a7", "a8", "a9", "a1") .inOrder(); } } @Test public void testProperEs6ModuleOrdering() throws Exception { makeDeps(); makeGraph(); List<SourceFile> sourceFiles = new ArrayList<>(); sourceFiles.add(code("/entry.js", provides(), requires())); sourceFiles.add(code("/a/a.js", provides(), requires())); sourceFiles.add(code("/a/b.js", provides(), requires())); sourceFiles.add(code("/b/a.js", provides(), requires())); sourceFiles.add(code("/b/b.js", provides(), requires())); sourceFiles.add(code("/b/c.js", provides(), requires())); sourceFiles.add(code("/important.js", provides(), requires())); HashMap<String, List<String>> orderedRequires = new HashMap<>(); orderedRequires.put( "/entry.js", ImmutableList.of( ModuleIdentifier.forFile("/b/b.js").toString(), ModuleIdentifier.forFile("/b/a.js").toString(), ModuleIdentifier.forFile("/important.js").toString(), ModuleIdentifier.forFile("/a/b.js").toString(), ModuleIdentifier.forFile("/a/a.js").toString())); orderedRequires.put("/a/a.js", ImmutableList.of()); orderedRequires.put("/a/b.js", ImmutableList.of()); orderedRequires.put("/b/a.js", ImmutableList.of()); orderedRequires.put( "/b/b.js", ImmutableList.of(ModuleIdentifier.forFile("/b/c.js").toString())); orderedRequires.put("/b/c.js", ImmutableList.of()); orderedRequires.put("/important.js", ImmutableList.of()); DependencyOptions depOptions = 
DependencyOptions.pruneForEntryPoints( ImmutableList.of(ModuleIdentifier.forFile("/entry.js"))); for (int iterationCount = 0; iterationCount < 10; iterationCount++) { shuffle(sourceFiles); chunkA.removeAll(); for (SourceFile sourceFile : sourceFiles) { chunkA.add(sourceFile); } for (CompilerInput input : chunkA.getInputs()) { input.setCompiler(compiler); for (String require : orderedRequires.get(input.getSourceFile().getName())) { input.addOrderedRequire(Require.compilerModule(require)); } input.setHasFullParseDependencyInfo(true); } ImmutableList<CompilerInput> results = graph.manageDependencies(compiler, depOptions); assertInputs( chunkA, "/b/c.js", "/b/b.js", "/b/a.js", "/important.js", "/a/b.js", "/a/a.js", "/entry.js"); assertThat(sourceNames(results)) .containsExactly( "/b/c.js", "/b/b.js", "/b/a.js", "/important.js", "/a/b.js", "/a/a.js", "/entry.js") .inOrder(); } } @Test public void testMoveMarkedWeakSources() throws Exception { makeDeps(); SourceFile weak1 = SourceFile.fromCode("weak1", "", SourceKind.WEAK); SourceFile weak2 = SourceFile.fromCode("weak2", "", SourceKind.WEAK); SourceFile strong1 = SourceFile.fromCode("strong1", "", SourceKind.STRONG); SourceFile strong2 = SourceFile.fromCode("strong2", "", SourceKind.STRONG); chunkA.add(weak1); chunkA.add(strong1); chunkA.add(weak2); chunkA.add(strong2); for (CompilerInput input : chunkA.getInputs()) { input.setCompiler(compiler); } makeGraph(); assertThat(getWeakModule().getInputs().stream().map(CompilerInput::getSourceFile)) .containsExactly(weak1, weak2); assertThat(chunkA.getInputs().stream().map(CompilerInput::getSourceFile)) .containsExactly(strong1, strong2); } @Test public void testMoveMarkedWeakSourcesDuringManageDepsSortOnly() throws Exception { makeDeps(); SourceFile weak1 = SourceFile.fromCode("weak1", "", SourceKind.WEAK); SourceFile weak2 = SourceFile.fromCode("weak2", "", SourceKind.WEAK); SourceFile strong1 = SourceFile.fromCode("strong1", "", SourceKind.STRONG); SourceFile strong2 = 
SourceFile.fromCode("strong2", "", SourceKind.STRONG); chunkA.add(weak1); chunkA.add(strong1); chunkA.add(weak2); chunkA.add(strong2); for (CompilerInput input : chunkA.getInputs()) { input.setCompiler(compiler); } makeGraph(); graph.manageDependencies(compiler, DependencyOptions.sortOnly()); assertThat(getWeakModule().getInputs().stream().map(CompilerInput::getSourceFile)) .containsExactly(weak1, weak2); assertThat(chunkA.getInputs().stream().map(CompilerInput::getSourceFile)) .containsExactly(strong1, strong2); } @Test public void testIgnoreMarkedWeakSourcesDuringManageDepsPrune() throws Exception { makeDeps(); SourceFile weak1 = SourceFile.fromCode("weak1", "", SourceKind.WEAK); SourceFile weak2 = SourceFile.fromCode("weak2", "", SourceKind.WEAK); SourceFile strong1 = SourceFile.fromCode("strong1", "", SourceKind.STRONG); SourceFile strong2 = SourceFile.fromCode("strong2", "", SourceKind.STRONG); chunkA.add(weak1); chunkA.add(strong1); chunkA.add(weak2); chunkA.add(strong2); for (CompilerInput input : chunkA.getInputs()) { input.setCompiler(compiler); } makeGraph(); graph.manageDependencies( compiler, DependencyOptions.pruneForEntryPoints( ImmutableList.of( ModuleIdentifier.forFile("strong1"), ModuleIdentifier.forFile("strong2")))); assertThat( getWeakModule().getInputs().stream() .map(CompilerInput::getSourceFile) .collect(Collectors.toList())) .isEmpty(); assertThat(chunkA.getInputs().stream().map(CompilerInput::getSourceFile)) .containsExactly(strong1, strong2); } @Test public void testIgnoreDepsOfMarkedWeakSourcesDuringManageDepsPrune() throws Exception { makeDeps(); SourceFile weak1 = SourceFile.fromCode("weak1", "goog.requireType('weak1weak');", SourceKind.WEAK); SourceFile weak1weak = SourceFile.fromCode("weak1weak", "goog.provide('weak1weak');", SourceKind.WEAK); SourceFile weak2 = SourceFile.fromCode("weak2", "goog.require('weak2strong');", SourceKind.WEAK); SourceFile weak2strong = SourceFile.fromCode("weak2strong", "goog.provide('weak2strong');", 
SourceKind.WEAK); SourceFile strong1 = SourceFile.fromCode("strong1", "", SourceKind.STRONG); SourceFile strong2 = SourceFile.fromCode("strong2", "", SourceKind.STRONG); chunkA.add(weak1); chunkA.add(strong1); chunkA.add(weak2); chunkA.add(strong2); chunkA.add(weak1weak); chunkA.add(weak2strong); for (CompilerInput input : chunkA.getInputs()) { input.setCompiler(compiler); } makeGraph(); graph.manageDependencies( compiler, DependencyOptions.pruneForEntryPoints( ImmutableList.of( ModuleIdentifier.forFile("strong1"), ModuleIdentifier.forFile("strong2")))); assertThat( getWeakModule().getInputs().stream() .map(CompilerInput::getSourceFile) .collect(Collectors.toList())) .isEmpty(); assertThat(chunkA.getInputs().stream().map(CompilerInput::getSourceFile)) .containsExactly(strong1, strong2); } @Test public void testMoveImplicitWeakSourcesFromMoocherDuringManageDepsLegacyPrune() throws Exception { makeDeps(); SourceFile weak = SourceFile.fromCode("weak", "goog.provide('weak');"); SourceFile strong = SourceFile.fromCode("strong", ""); SourceFile moocher = SourceFile.fromCode("moocher", "goog.requireType('weak');"); chunkA.add(weak); chunkA.add(strong); chunkA.add(moocher); for (CompilerInput input : chunkA.getInputs()) { input.setCompiler(compiler); } makeGraph(); graph.manageDependencies( compiler, DependencyOptions.pruneLegacyForEntryPoints( ImmutableList.of(ModuleIdentifier.forFile("strong")))); assertThat(getWeakModule().getInputs().stream().map(CompilerInput::getSourceFile)) .containsExactly(weak); assertThat(chunkA.getInputs().stream().map(CompilerInput::getSourceFile)) .containsExactly(strong, moocher); } @Test public void testImplicitWeakSourcesNotMovedDuringManageDepsSortOnly() throws Exception { makeDeps(); SourceFile weak1 = SourceFile.fromCode("weak1", "goog.provide('weak1');"); SourceFile weak2 = SourceFile.fromCode("weak2", "goog.provide('weak2');"); SourceFile strong1 = SourceFile.fromCode("strong1", "goog.requireType('weak1');"); SourceFile strong2 = 
SourceFile.fromCode("strong2", "goog.requireType('weak2');"); chunkA.add(weak1); chunkA.add(strong1); chunkA.add(weak2); chunkA.add(strong2); for (CompilerInput input : chunkA.getInputs()) { input.setCompiler(compiler); } makeGraph(); graph.manageDependencies(compiler, DependencyOptions.sortOnly()); assertThat(getWeakModule().getInputs()).isEmpty(); assertThat(chunkA.getInputs().stream().map(CompilerInput::getSourceFile)) .containsExactly(weak1, strong1, weak2, strong2); } @Test public void testImplicitWeakSourcesMovedDuringManageDepsPrune() throws Exception { makeDeps(); SourceFile weak1 = SourceFile.fromCode("weak1", "goog.provide('weak1');"); SourceFile weak2 = SourceFile.fromCode("weak2", "goog.provide('weak2');"); SourceFile strong1 = SourceFile.fromCode("strong1", "goog.requireType('weak1');"); SourceFile strong2 = SourceFile.fromCode("strong2", "goog.requireType('weak2');"); chunkA.add(weak1); chunkA.add(strong1); chunkA.add(weak2); chunkA.add(strong2); for (CompilerInput input : chunkA.getInputs()) { input.setCompiler(compiler); } makeGraph(); graph.manageDependencies( compiler, DependencyOptions.pruneForEntryPoints( ImmutableList.of( ModuleIdentifier.forFile("strong1"), ModuleIdentifier.forFile("strong2")))); assertThat(getWeakModule().getInputs().stream().map(CompilerInput::getSourceFile)) .containsExactly(weak1, weak2); assertThat(chunkA.getInputs().stream().map(CompilerInput::getSourceFile)) .containsExactly(strong1, strong2); } @Test public void testTransitiveWeakSources() throws Exception { makeDeps(); SourceFile weak1 = SourceFile.fromCode( "weak1", "goog.provide('weak1'); goog.requireType('weak2'); goog.require('strongFromWeak');"); SourceFile strongFromWeak = SourceFile.fromCode("strongFromWeak", "goog.provide('strongFromWeak');"); SourceFile weak2 = SourceFile.fromCode("weak2", "goog.provide('weak2'); goog.requireType('weak3');"); SourceFile weak3 = SourceFile.fromCode("weak3", "goog.provide('weak3');"); SourceFile strong1 = 
SourceFile.fromCode("strong1", "goog.requireType('weak1');"); chunkA.add(weak1); chunkA.add(strong1); chunkA.add(weak2); chunkA.add(weak3); chunkA.add(strongFromWeak); for (CompilerInput input : chunkA.getInputs()) { input.setCompiler(compiler); } makeGraph(); graph.manageDependencies( compiler, DependencyOptions.pruneForEntryPoints( ImmutableList.of(ModuleIdentifier.forFile("strong1")))); assertThat(getWeakModule().getInputs().stream().map(CompilerInput::getSourceFile)) .containsExactly(weak1, weak2, weak3, strongFromWeak); assertThat(chunkA.getInputs().stream().map(CompilerInput::getSourceFile)) .containsExactly(strong1); } private void assertInputs(JSChunk chunk, String... sourceNames) { assertThat(sourceNames(chunk.getInputs())).isEqualTo(ImmutableList.copyOf(sourceNames)); } private List<String> sourceNames(List<CompilerInput> inputs) { List<String> inputNames = new ArrayList<>(); for (CompilerInput input : inputs) { inputNames.add(input.getName()); } return inputNames; } private SourceFile code(String sourceName, List<String> provides, List<String> requires) { return code(sourceName, "", provides, requires); } private SourceFile code( String sourceName, String source, List<String> provides, List<String> requires) { String text = ""; for (String p : provides) { text += "goog.provide('" + p + "');\n"; } for (String r : requires) { text += "goog.require('" + r + "');\n"; } return SourceFile.fromCode(sourceName, text + source); } private ImmutableList<String> provides(String... strings) { return ImmutableList.copyOf(strings); } private ImmutableList<String> requires(String... strings) { return ImmutableList.copyOf(strings); } private void assertSmallestCoveringSubtree( JSChunk expected, JSChunk parentTree, JSChunk... modules) { assertSmallestCoveringSubtree(expected, graph, parentTree, modules); } private void assertSmallestCoveringSubtree( JSChunk expected, JSChunkGraph graph, JSChunk parentTree, JSChunk... 
modules) { BitSet modulesBitSet = new BitSet(); for (JSChunk m : modules) { modulesBitSet.set(m.getIndex()); } assertSmallestCoveringSubtree(expected, graph, parentTree, modulesBitSet); } private void assertSmallestCoveringSubtree( JSChunk expected, JSChunkGraph graph, JSChunk parentTree, BitSet modules) { JSChunk actual = graph.getSmallestCoveringSubtree(parentTree, modules); assertWithMessage( "Smallest covering subtree of %s in %s should be %s but was %s", parentTree, modules, expected, actual) .that(actual) .isEqualTo(expected); } private void assertDeepestCommonDepInclusive(JSChunk expected, JSChunk m1, JSChunk m2) { assertDeepestCommonDepOneWay(expected, m1, m2, true); assertDeepestCommonDepOneWay(expected, m2, m1, true); } private void assertDeepestCommonDep(@Nullable JSChunk expected, JSChunk m1, JSChunk m2) { assertDeepestCommonDepOneWay(expected, m1, m2, false); assertDeepestCommonDepOneWay(expected, m2, m1, false); } private void assertDeepestCommonDepOneWay( JSChunk expected, JSChunk m1, JSChunk m2, boolean inclusive) { JSChunk actual = inclusive ? graph.getDeepestCommonDependencyInclusive(m1, m2) : graph.getDeepestCommonDependency(m1, m2); if (actual != expected) { assertWithMessage( "Deepest common dep of %s and %s should be %s but was %s", m1.getName(), m2.getName(), expected == null ? "null" : expected.getName(), actual == null ? "null" : actual.getName()) .fail(); } } private void assertTransitiveDepsDeepestFirst(JSChunk m, JSChunk... deps) { Iterable<JSChunk> actual = graph.getTransitiveDepsDeepestFirst(m); assertThat(Arrays.toString(Iterables.toArray(actual, JSChunk.class))) .isEqualTo(Arrays.toString(deps)); } }
googleapis/google-cloud-java
36,665
java-cloudsupport/proto-google-cloud-cloudsupport-v2beta/src/main/java/com/google/cloud/support/v2beta/ListCasesResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/support/v2beta/case_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.support.v2beta; /** * * * <pre> * The response message for the ListCases endpoint. * </pre> * * Protobuf type {@code google.cloud.support.v2beta.ListCasesResponse} */ public final class ListCasesResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.support.v2beta.ListCasesResponse) ListCasesResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListCasesResponse.newBuilder() to construct. 
private ListCasesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListCasesResponse() { cases_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListCasesResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.support.v2beta.CaseServiceProto .internal_static_google_cloud_support_v2beta_ListCasesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.support.v2beta.CaseServiceProto .internal_static_google_cloud_support_v2beta_ListCasesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.support.v2beta.ListCasesResponse.class, com.google.cloud.support.v2beta.ListCasesResponse.Builder.class); } public static final int CASES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.support.v2beta.Case> cases_; /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. * </pre> * * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.support.v2beta.Case> getCasesList() { return cases_; } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. * </pre> * * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.support.v2beta.CaseOrBuilder> getCasesOrBuilderList() { return cases_; } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. 
* </pre> * * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code> */ @java.lang.Override public int getCasesCount() { return cases_.size(); } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. * </pre> * * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code> */ @java.lang.Override public com.google.cloud.support.v2beta.Case getCases(int index) { return cases_.get(index); } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. * </pre> * * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code> */ @java.lang.Override public com.google.cloud.support.v2beta.CaseOrBuilder getCasesOrBuilder(int index) { return cases_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to retrieve the next page of results. Set this in the `page_token` * field of subsequent `cases.list` requests. If unspecified, there are no * more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token to retrieve the next page of results. Set this in the `page_token` * field of subsequent `cases.list` requests. If unspecified, there are no * more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < cases_.size(); i++) { output.writeMessage(1, cases_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < cases_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, cases_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.support.v2beta.ListCasesResponse)) { return super.equals(obj); } com.google.cloud.support.v2beta.ListCasesResponse other = (com.google.cloud.support.v2beta.ListCasesResponse) obj; if (!getCasesList().equals(other.getCasesList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getCasesCount() > 0) { hash = (37 * hash) + CASES_FIELD_NUMBER; hash = (53 * hash) + getCasesList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.support.v2beta.ListCasesResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.support.v2beta.ListCasesResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.support.v2beta.ListCasesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.support.v2beta.ListCasesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.support.v2beta.ListCasesResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.support.v2beta.ListCasesResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.support.v2beta.ListCasesResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.support.v2beta.ListCasesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.support.v2beta.ListCasesResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.support.v2beta.ListCasesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.support.v2beta.ListCasesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.support.v2beta.ListCasesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.support.v2beta.ListCasesResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The response message for the ListCases endpoint. * </pre> * * Protobuf type {@code google.cloud.support.v2beta.ListCasesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.support.v2beta.ListCasesResponse) com.google.cloud.support.v2beta.ListCasesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.support.v2beta.CaseServiceProto .internal_static_google_cloud_support_v2beta_ListCasesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.support.v2beta.CaseServiceProto .internal_static_google_cloud_support_v2beta_ListCasesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.support.v2beta.ListCasesResponse.class, com.google.cloud.support.v2beta.ListCasesResponse.Builder.class); } // Construct using com.google.cloud.support.v2beta.ListCasesResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (casesBuilder_ == null) { cases_ = java.util.Collections.emptyList(); } else { cases_ = null; casesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.support.v2beta.CaseServiceProto .internal_static_google_cloud_support_v2beta_ListCasesResponse_descriptor; } @java.lang.Override 
public com.google.cloud.support.v2beta.ListCasesResponse getDefaultInstanceForType() { return com.google.cloud.support.v2beta.ListCasesResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.support.v2beta.ListCasesResponse build() { com.google.cloud.support.v2beta.ListCasesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.support.v2beta.ListCasesResponse buildPartial() { com.google.cloud.support.v2beta.ListCasesResponse result = new com.google.cloud.support.v2beta.ListCasesResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.support.v2beta.ListCasesResponse result) { if (casesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { cases_ = java.util.Collections.unmodifiableList(cases_); bitField0_ = (bitField0_ & ~0x00000001); } result.cases_ = cases_; } else { result.cases_ = casesBuilder_.build(); } } private void buildPartial0(com.google.cloud.support.v2beta.ListCasesResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, 
index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.support.v2beta.ListCasesResponse) { return mergeFrom((com.google.cloud.support.v2beta.ListCasesResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.support.v2beta.ListCasesResponse other) { if (other == com.google.cloud.support.v2beta.ListCasesResponse.getDefaultInstance()) return this; if (casesBuilder_ == null) { if (!other.cases_.isEmpty()) { if (cases_.isEmpty()) { cases_ = other.cases_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureCasesIsMutable(); cases_.addAll(other.cases_); } onChanged(); } } else { if (!other.cases_.isEmpty()) { if (casesBuilder_.isEmpty()) { casesBuilder_.dispose(); casesBuilder_ = null; cases_ = other.cases_; bitField0_ = (bitField0_ & ~0x00000001); casesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getCasesFieldBuilder() : null; } else { casesBuilder_.addAllMessages(other.cases_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.support.v2beta.Case m = input.readMessage( com.google.cloud.support.v2beta.Case.parser(), extensionRegistry); if (casesBuilder_ == null) { ensureCasesIsMutable(); cases_.add(m); } else { casesBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.support.v2beta.Case> cases_ = java.util.Collections.emptyList(); private void ensureCasesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { cases_ = new java.util.ArrayList<com.google.cloud.support.v2beta.Case>(cases_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.support.v2beta.Case, com.google.cloud.support.v2beta.Case.Builder, com.google.cloud.support.v2beta.CaseOrBuilder> casesBuilder_; /** * * * <pre> * The list of cases associated with the 
parent after any * filters have been applied. * </pre> * * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code> */ public java.util.List<com.google.cloud.support.v2beta.Case> getCasesList() { if (casesBuilder_ == null) { return java.util.Collections.unmodifiableList(cases_); } else { return casesBuilder_.getMessageList(); } } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. * </pre> * * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code> */ public int getCasesCount() { if (casesBuilder_ == null) { return cases_.size(); } else { return casesBuilder_.getCount(); } } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. * </pre> * * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code> */ public com.google.cloud.support.v2beta.Case getCases(int index) { if (casesBuilder_ == null) { return cases_.get(index); } else { return casesBuilder_.getMessage(index); } } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. * </pre> * * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code> */ public Builder setCases(int index, com.google.cloud.support.v2beta.Case value) { if (casesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCasesIsMutable(); cases_.set(index, value); onChanged(); } else { casesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. 
* </pre> * * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code> */ public Builder setCases( int index, com.google.cloud.support.v2beta.Case.Builder builderForValue) { if (casesBuilder_ == null) { ensureCasesIsMutable(); cases_.set(index, builderForValue.build()); onChanged(); } else { casesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. * </pre> * * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code> */ public Builder addCases(com.google.cloud.support.v2beta.Case value) { if (casesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCasesIsMutable(); cases_.add(value); onChanged(); } else { casesBuilder_.addMessage(value); } return this; } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. * </pre> * * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code> */ public Builder addCases(int index, com.google.cloud.support.v2beta.Case value) { if (casesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCasesIsMutable(); cases_.add(index, value); onChanged(); } else { casesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. * </pre> * * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code> */ public Builder addCases(com.google.cloud.support.v2beta.Case.Builder builderForValue) { if (casesBuilder_ == null) { ensureCasesIsMutable(); cases_.add(builderForValue.build()); onChanged(); } else { casesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. 
* </pre> * * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code> */ public Builder addCases( int index, com.google.cloud.support.v2beta.Case.Builder builderForValue) { if (casesBuilder_ == null) { ensureCasesIsMutable(); cases_.add(index, builderForValue.build()); onChanged(); } else { casesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. * </pre> * * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code> */ public Builder addAllCases( java.lang.Iterable<? extends com.google.cloud.support.v2beta.Case> values) { if (casesBuilder_ == null) { ensureCasesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, cases_); onChanged(); } else { casesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. * </pre> * * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code> */ public Builder clearCases() { if (casesBuilder_ == null) { cases_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { casesBuilder_.clear(); } return this; } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. * </pre> * * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code> */ public Builder removeCases(int index) { if (casesBuilder_ == null) { ensureCasesIsMutable(); cases_.remove(index); onChanged(); } else { casesBuilder_.remove(index); } return this; } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. 
* </pre> * * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code> */ public com.google.cloud.support.v2beta.Case.Builder getCasesBuilder(int index) { return getCasesFieldBuilder().getBuilder(index); } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. * </pre> * * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code> */ public com.google.cloud.support.v2beta.CaseOrBuilder getCasesOrBuilder(int index) { if (casesBuilder_ == null) { return cases_.get(index); } else { return casesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. * </pre> * * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code> */ public java.util.List<? extends com.google.cloud.support.v2beta.CaseOrBuilder> getCasesOrBuilderList() { if (casesBuilder_ != null) { return casesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(cases_); } } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. * </pre> * * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code> */ public com.google.cloud.support.v2beta.Case.Builder addCasesBuilder() { return getCasesFieldBuilder() .addBuilder(com.google.cloud.support.v2beta.Case.getDefaultInstance()); } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. * </pre> * * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code> */ public com.google.cloud.support.v2beta.Case.Builder addCasesBuilder(int index) { return getCasesFieldBuilder() .addBuilder(index, com.google.cloud.support.v2beta.Case.getDefaultInstance()); } /** * * * <pre> * The list of cases associated with the parent after any * filters have been applied. 
* </pre> * * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code> */ public java.util.List<com.google.cloud.support.v2beta.Case.Builder> getCasesBuilderList() { return getCasesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.support.v2beta.Case, com.google.cloud.support.v2beta.Case.Builder, com.google.cloud.support.v2beta.CaseOrBuilder> getCasesFieldBuilder() { if (casesBuilder_ == null) { casesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.support.v2beta.Case, com.google.cloud.support.v2beta.Case.Builder, com.google.cloud.support.v2beta.CaseOrBuilder>( cases_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); cases_ = null; } return casesBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to retrieve the next page of results. Set this in the `page_token` * field of subsequent `cases.list` requests. If unspecified, there are no * more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token to retrieve the next page of results. Set this in the `page_token` * field of subsequent `cases.list` requests. If unspecified, there are no * more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token to retrieve the next page of results. Set this in the `page_token` * field of subsequent `cases.list` requests. If unspecified, there are no * more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token to retrieve the next page of results. Set this in the `page_token` * field of subsequent `cases.list` requests. If unspecified, there are no * more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token to retrieve the next page of results. Set this in the `page_token` * field of subsequent `cases.list` requests. If unspecified, there are no * more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.support.v2beta.ListCasesResponse) } // @@protoc_insertion_point(class_scope:google.cloud.support.v2beta.ListCasesResponse) private static final com.google.cloud.support.v2beta.ListCasesResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.support.v2beta.ListCasesResponse(); } public static com.google.cloud.support.v2beta.ListCasesResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListCasesResponse> PARSER = new com.google.protobuf.AbstractParser<ListCasesResponse>() { @java.lang.Override public ListCasesResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static 
com.google.protobuf.Parser<ListCasesResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListCasesResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.support.v2beta.ListCasesResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
oracle/nosql
36,821
kvmain/src/main/java/oracle/kv/impl/api/ops/TableQueryHandler.java
/*- * Copyright (C) 2011, 2025 Oracle and/or its affiliates. All rights reserved. * * This file was distributed by Oracle as part of a version of Oracle NoSQL * Database made available at: * * http://www.oracle.com/technetwork/database/database-technologies/nosqldb/downloads/index.html * * Please see the LICENSE file included in the top-level directory of the * appropriate version of Oracle NoSQL Database for a copy of the license and * additional information. */ package oracle.kv.impl.api.ops; import java.time.temporal.ChronoUnit; import java.time.Clock; import java.util.ArrayList; import java.util.List; import java.util.Set; import java.util.TreeSet; import oracle.kv.MetadataNotFoundException; import oracle.kv.PrepareQueryException; import oracle.kv.impl.api.TopologyManager; import oracle.kv.impl.api.ops.InternalOperation.OpCode; import oracle.kv.impl.api.ops.Result.QueryResult; import oracle.kv.impl.api.table.FieldValueImpl; import oracle.kv.impl.api.table.TableMetadata; import oracle.kv.impl.api.table.TableMetadataHelper; import oracle.kv.impl.api.table.TupleValue; import oracle.kv.impl.fault.RNUnavailableException; import oracle.kv.impl.fault.WrappedClientException; import oracle.kv.impl.metadata.Metadata.MetadataType; import oracle.kv.impl.query.QueryException; import oracle.kv.impl.query.QueryStateException; import oracle.kv.impl.query.QueryRuntimeException; import oracle.kv.impl.query.runtime.PartitionUnionIter; import oracle.kv.impl.query.runtime.PartitionUnionIter.PartitionedResults; import oracle.kv.impl.query.runtime.PlanIter; import oracle.kv.impl.query.runtime.PlanIter.PlanIterKind; import oracle.kv.impl.query.runtime.ResumeInfo; import oracle.kv.impl.query.runtime.RuntimeControlBlock; import oracle.kv.impl.query.runtime.server.ServerIterFactoryImpl; import oracle.kv.impl.rep.PartitionManager; import oracle.kv.impl.rep.RepNode; import oracle.kv.impl.rep.migration.generation.PartitionGenerationTable; import oracle.kv.impl.security.KVStorePrivilege; 
import oracle.kv.impl.security.SystemPrivilege; import oracle.kv.impl.topo.PartitionId; import oracle.kv.impl.topo.RepGroupId; import oracle.kv.impl.topo.Topology; import oracle.kv.impl.util.CommonLoggerUtils; import oracle.kv.query.ExecuteOptions; import com.sleepycat.je.LockConflictException; import com.sleepycat.je.Database; import com.sleepycat.je.Transaction; /** * Server handler for {@link TableQuery}. */ public class TableQueryHandler extends InternalOperationHandler<TableQuery> { public static double WAIT_ELASTICITY_MASTER_PERCENT = 0.75; public static double WAIT_ELASTICITY_REPLICA_PERCENT = 0.25; public static int WAIT_MS_FOR_ELASTICITY = 100; TableQueryHandler(OperationHandler handler, OpCode opCode) { super(handler, opCode, TableQuery.class); } @Override List<? extends KVStorePrivilege> getRequiredPrivileges(TableQuery op) { /* * Checks the basic privilege for authentication here, and leave the * keyspace checking and the table access checking in * {@code verifyTableAccess()}. */ return SystemPrivilege.usrviewPrivList; } /** * Returns a TableMetadataHelper instance available on this node. */ private TableMetadataHelper getMetadataHelper() { final TableMetadata md = (TableMetadata) getRepNode().getMetadata(MetadataType.TABLE); String msg = null; if (md == null) { msg = "Query execution unable to get metadata from " + "RN: " + getRepNode().getRepNodeId(); } else if (md.isEmpty()) { msg = "Metadata not initialised with tables at RN: " + getRepNode().getRepNodeId(); } if (msg != null) { getLogger().warning(msg); throw new MetadataNotFoundException(msg, operationHandler.getTableMetadataSeqNum()); } return md; } @Override Result execute(TableQuery op, Transaction txn, PartitionId partitionId) { TableMetadataHelper mdHelper = getMetadataHelper(); ExecuteOptions options = new ExecuteOptions(). setRegionId(op.getLocalRegionId()). setDoTombstone(op.doTombstone()); /* * Save the ResumeInfo before execution. 
* * The ResumeInfo can be modified during execution, and the * query request might need to be forwarded to another RN to * re-execute in some cases(e.g. master transfer). In those * cases, the ResumeInfo should be reverted to its previous * value before execution. */ ResumeInfo savedResumeInfo = new ResumeInfo(op.getResumeInfo()); QueryResult result; String trace = null; try { do { RuntimeControlBlock rcb = new RuntimeControlBlock(getLogger(), mdHelper, options, op, this, new ServerIterFactoryImpl(txn, operationHandler)); if (trace != null) { rcb.setTrace(trace); } result = executeQueryPlan(op, rcb, partitionId, savedResumeInfo); if (result == null) { trace = rcb.getTrace(); } } while (result == null); return result; } catch (Throwable t) { /* * Restore the ResumeInfo to the previous value before execution, so * that it can be re-executed on other RN if needed. * * We don't know which error will cause the request to be forwarded, * just always revert the ResumeInfo, it should be harmless. 
*/ op.setResumeInfo(savedResumeInfo); throw t; } } private QueryResult executeQueryPlan( TableQuery op, RuntimeControlBlock rcb, PartitionId pid, ResumeInfo savedResumeInfo) { RepNode rn = getRepNode(); TopologyManager topoManager = rn.getTopologyManager(); ResumeInfo ri = op.getResumeInfo(); ri.setRCB(rcb); ri.setCurrentPid(pid.getPartitionId()); Topology baseTopo = rcb.getBaseTopo(); int baseTopoNum = ri.getBaseTopoNum(); int batchSize = op.getBatchSize(); PlanIter queryPlan = op.getQueryPlan(); boolean inSortPhase1 = false; PartitionUnionIter partUnionIter = null; Throwable exception = null; boolean gotResult = false; boolean more = false; List<FieldValueImpl> results = new ArrayList<FieldValueImpl>(batchSize); int[] pids = null; int[] numResultsPerPid = null; ResumeInfo[] resumeInfos = null; boolean isAllShardsQuery = false; boolean isSinglePartitionQuery = false; boolean isNormalScan = false; String batchName = null; if (rcb.getTraceLevel() >= 1) { batchName = (Clock.systemUTC().instant().truncatedTo(ChronoUnit.MILLIS) + " " + rn.getRepNodeId().getFullName()); } if (op.getOpCode() == OpCode.QUERY_MULTI_PARTITION) { if (queryPlan.getKind() == PlanIterKind.PARTITION_UNION) { partUnionIter = (PartitionUnionIter)queryPlan; inSortPhase1 = ri.isInSortPhase1(); } else if (queryPlan.getKind() == PlanIterKind.GROUP && queryPlan.getInputIter().getKind() == PlanIterKind.PARTITION_UNION) { partUnionIter = (PartitionUnionIter)queryPlan.getInputIter(); assert(!partUnionIter.doesSort()); } } else if (op.getOpCode() == OpCode.QUERY_MULTI_SHARD) { isAllShardsQuery = true; } else { isSinglePartitionQuery = true; } try { queryPlan.open(rcb); if (inSortPhase1 && partUnionIter != null) { partUnionIter.next(rcb); PartitionedResults res = partUnionIter.getPartitionedResults(rcb); results = res.results; pids = res.pids; numResultsPerPid = res.numResultsPerPid; resumeInfos = res.resumeInfos; ri = rcb.getResumeInfo(); } else { if (rcb.getTraceLevel() >= 1) { 
rcb.trace("TableQueryHandler: Executing query on " + pid + " with baseTopoNum " + baseTopoNum + " and ResumeInfo :\n" + ri); if (rcb.getTraceLevel() >= 4) { rcb.trace("Batch size: " + batchSize + " timeout: " + op.getTimeout()); rcb.trace(queryPlan.display(true)); } } if (baseTopoNum >= 0) { if (isAllShardsQuery) { if (baseTopo == null) { baseTopo = getBaseTopo(rcb, baseTopoNum); } isNormalScan = (ri.getVirtualScanPid() < 0); if (isNormalScan) { handlePartitionMigrations(rcb); } else { ensureMigratedPartition(rcb); } } else if (isSinglePartitionQuery && rcb.usesIndex()) { topoManager.addQuery(rcb); if (!checkPartitionIsHere(rcb, pid)) { if (rcb.getTraceLevel() >= 1) { rcb.trace(true, "Partition " + pid.getPartitionId() + " is not found here. " + "Throwing RNUnavailableException"); } throw new RNUnavailableException( " Partition " + pid.getPartitionId() + " is not found in this RN"); } } } while (true) { gotResult = queryPlan.next(rcb); if (!gotResult) { break; } addResult(rcb, ri, queryPlan, results); if (batchSize > 0 && results.size() >= batchSize) { if (rcb.getTraceLevel() >= 1) { rcb.trace("TableQueryHandler: query needs to " + "suspend because it has reached the " + "batch size. 
Num results = " + results.size()); } rcb.setNeedToSuspend(true); } } if (rcb.needToSuspend()) { byte[] primResumeKey = ri.getPrimResumeKey(0); byte[] secResumeKey = ri.getSecResumeKey(0); if (primResumeKey != null || secResumeKey != null) { more = true; } else { more = false; } } else { more = false; } ri.setNumResultsComputed(results.size()); if (!more) { ri.setCurrentIndexRange(0, 0); } if (rcb.getTraceLevel() >= 1) { rcb.trace("TableQueryHandler: Produced a batch of " + results.size() + " results on " + pid + " number of KB read = " + op.getReadKB() + " more results = " + more + " reachedLimit = " + rcb.getReachedLimit() + " needToSuspend = " + rcb.needToSuspend()); if (rcb.getTraceLevel() >= 2) { rcb.trace("Resume Info =\n" + ri.toString()); } } } } catch (LockConflictException lce) { /* let the caller handle this */ exception = lce; throw lce; } catch (QueryException qe) { getLogger().fine("Query execution failed: " + qe); /* * Turn this into a wrapped IllegalArgumentException so that it can * be passed to the client. */ exception = qe; throw qe.getWrappedIllegalArgument(); } catch (QueryStateException qse) { exception = qse; /* This exception indicates a bug or problem in the engine. Wrap it * into one that can be thrown to the client. Specifically, a * WrappedClientException is thrown (which is a RuntimeException) */ getLogger().warning(qse.toString()); qse.throwClientException(); } catch (IllegalArgumentException e) { exception = e; throw new WrappedClientException(e); } catch (PrepareQueryException pqe) { exception = pqe; throw new WrappedClientException(pqe); } catch (RNUnavailableException rnu) { throw rnu; } catch (RuntimeException re) { exception = re; /* * RuntimeException should not be caught here. REs should be * propagated up to the request handler as many are explicitly * handled there. The issue is that the query code can throw REs * which it should catch and turn into something specific (such as * an IAE). 
Until that time this needs to remain to avoid the * RN restarting due to some minor query issue. The request handler * will check to see if the cause of the QueryRuntimeException is * handled. If it isn't, it will rethrow the QueryRuntimeException, * which the ServiceFaultHandler will handle by sending the cause * the client rather than restarting. * * Detect NullPointerException and log it as SEVERE. NPEs * should not be considered user errors. */ if (re instanceof NullPointerException) { getLogger().severe("NullPointerException during query: " + re); } throw new QueryRuntimeException(re); } catch (Throwable t) { exception = t; throw t; } finally { if (exception != null && rcb.getTraceLevel() >= 1 && !rcb.doLogFileTracing()) { rcb.trace(true, "QUERY TRACE:\n" + rcb.getTrace() + "\n" + CommonLoggerUtils.getStackTrace(exception)); } if (baseTopoNum >= 0) { if (isAllShardsQuery || isSinglePartitionQuery) { topoManager.removeQuery(rcb); } } try { queryPlan.close(rcb); } catch (RuntimeException re) { if (exception == null) { throw new QueryRuntimeException(re); } } } boolean repeatBatch = false; if (baseTopoNum >= 0) { if (isAllShardsQuery) { if (isNormalScan) { checkForRestoredPartitions(rcb); } repeatBatch = checkForMigratedPartitions(rcb, isNormalScan); } else if (isSinglePartitionQuery && rcb.usesIndex()) { ArrayList<PartitionId> migratedPartitions = rcb.getMigratedPartitions(); for (PartitionId pid2 : migratedPartitions) { if (pid2.getPartitionId() == pid.getPartitionId()) { rcb.trace(true, "partition " + pid.getPartitionId() + " migrated during index scan. 
" + "Throwing RNUnavailableException"); throw new RNUnavailableException( " Partition " + pid.getPartitionId() + " migrated during index scan"); } } } } ri.addReadKB(op.getReadKB()); String batchTrace = null; if (rcb.getTraceLevel() >= 1) { batchTrace = rcb.getTrace(); } if (!repeatBatch) { return new QueryResult(getOpCode(), op.getReadKB(), op.getWriteKB(), results, op.getResultDef(), op.mayReturnNULL(), more, ri, rcb.getReachedLimit(), pids, numResultsPerPid, resumeInfos, batchName, batchTrace); } /* The batch has to aborted and repeated. So, throw away the results * produced during the batch. If the batch has reached the consumption * limit, it has to be repeated by the driver. In this case, an empty * result and the savedRI are sent back to the driver (via the proxy). * Otherwise, the batch is repeated locally. */ results.clear(); savedResumeInfo.addReadKB(op.getReadKB()); if (rcb.getReachedLimit()) { if (rcb.getTraceLevel() >= 1) { rcb.trace("Repeating query batch from the proxy"); rcb.trace(true, "Repeating query batch from the proxy"); } return new QueryResult(getOpCode(), op.getReadKB(), op.getWriteKB(), results, op.getResultDef(), op.mayReturnNULL(), more, savedResumeInfo, rcb.getReachedLimit(), pids, numResultsPerPid, resumeInfos, batchName, batchTrace); } if (rcb.getTraceLevel() >= 1) { rcb.trace("Repeating query batch locally"); rcb.trace(true, "Repeating query batch locally. 
Current trace =\n" + rcb.getTrace()); } op.setResumeInfo(new ResumeInfo(savedResumeInfo)); return null; } private void addResult( RuntimeControlBlock rcb, ResumeInfo ri, PlanIter queryPlan, List<FieldValueImpl> results) { FieldValueImpl res = rcb.getRegVal(queryPlan.getResultReg()); if (res.isTuple()) { res = ((TupleValue)res).toRecord(); } if (rcb.getTraceLevel() >= 1) { if (ri.getCurrentPid() > 0) { rcb.trace("TableQueryHandler: Produced result on " + "partition " + ri.getCurrentPid() + " :\n" + res); } else { rcb.trace("TableQueryHandler: Produced result :\n" + res); } } results.add(res); } public PartitionId[] getShardPids() { Set<PartitionId> pids = new TreeSet<PartitionId>(getRepNode().getPartitions()); return pids.toArray(new PartitionId[pids.size()]); } public int getNumPartitions() { return getRepNode().getTopology().getNumPartitions(); } private Topology getBaseTopo(RuntimeControlBlock rcb, int baseTopoNum) { RepNode rn = getRepNode(); TopologyManager topoManager = rn.getTopologyManager(); Topology currTopo = topoManager.getTopology(); Topology baseTopo; if (currTopo.getSequenceNumber() == baseTopoNum) { baseTopo = currTopo; } else { try { baseTopo = topoManager.getTopology( rn.getKVStore(), baseTopoNum, rcb.getRemainingTimeOrZero()); if (baseTopo == null) { throw new QueryStateException( "Failed to read base topology with " + "sequence number " + baseTopoNum + ", got null result"); } } catch (Throwable cause) { throw new QueryStateException( "Failed to read base topology with " + "sequence number " + baseTopoNum, cause); } } rcb.setBaseTopo(baseTopo); return baseTopo; } /* * This method is called for base-shard scans only, before query execution * starts. It does the following: * - registers the RCB with the MigrationManager so that the query will * receive notifications about partition migrations and migration * failures. 
* - finds partitions that have moved away from this shard since * the last query batch, and updates the ResumeInfo (the VSM) * accordingly. * - checks if any partitions that were already known to have moved away * are found back in this shard (due to migration failure at the target * shard). If any such partitions are found, an exception is thrown. */ private void handlePartitionMigrations(RuntimeControlBlock rcb) { RepNode rn = getRepNode(); TopologyManager topoManager = rn.getTopologyManager(); int sid = rn.getRepNodeId().getGroupId(); ResumeInfo ri = rcb.getResumeInfo(); int baseTopoNum = ri.getBaseTopoNum(); Topology baseTopo = rcb.getBaseTopo(); List<Integer> newMigratedPartitions = new ArrayList<Integer>(); List<Integer> targetShards = new ArrayList<Integer>(); topoManager.addQuery(rcb); waitServerStateUpdated(rcb, topoManager, rn.getPartitionManager(), baseTopo); /* Check that partitions that were found (in previous batches) to * have migrated out of this shard have not returned to this shard * (due to failures). To see why this is needed, consider the * following scenario: * Both the current and the previous batch scan the same shard. At * the start of the previous batch, partition P was found to have * migrated. In between the 2 batches, P returns to this shard. If we * allow the scan in this batch to proceed, we will generate duplicate * results when we execute the virtual-shard scan for P. * * Note: If this RN is a REPLICA, the partition may be still here due * to a delay in the replication of the MigrationDB. So, we wait a * little before throwing the exception. 
*/ Set<Integer> migratedPartitions = ri.getMigratedPartitions(); if (migratedPartitions != null) { for (int pid : migratedPartitions) { PartitionId PID = new PartitionId(pid); boolean success = true; long waitTime = 0; while (true) { Topology localTopo = topoManager.getLocalTopology(); RepGroupId currSidForPid = localTopo.getRepGroupId(PID); RepGroupId origSidForPid = baseTopo.getRepGroupId(PID); if (sid == currSidForPid.getGroupId() && sid == origSidForPid.getGroupId()) { try { Thread.sleep(WAIT_MS_FOR_ELASTICITY); waitTime += WAIT_MS_FOR_ELASTICITY; if (waitTime > (rcb.getTimeoutMs() * WAIT_ELASTICITY_REPLICA_PERCENT)) { success = false; break; } } catch (InterruptedException e) { } } break; } if (!success) { if (rcb.getTraceLevel() >= 1) { rcb.trace("Partition " + pid + " found back in shard " + sid + " after having moved away from it."); } if (rn.isMaster()) { throw new QueryStateException( "Partition " + pid + " found back in shard " + sid + " after having moved away from it."); } throw new RNUnavailableException( "Partition " + pid + " was supposed to have " + "migrated out of shard " + sid + " but it is still here", true); } } } final boolean updated = topoManager.callWithObjectLock(() -> { Topology currTopo = topoManager.getTopology(); Topology localTopo = topoManager.getLocalTopology(); if (baseTopoNum == currTopo.getSequenceNumber() && (localTopo.getSequenceNumber() == currTopo.getSequenceNumber()) && (!localTopo.appliedLocalizationChange())) { return false; } /* Find the base partitions of this shard that are not known * already to have migrated. Among these partitions find the ones * that have migrated out of this shard since the end of the * previous batch and update the ri.theVSM accordingly. */ List<PartitionId> expectedPartitions = baseTopo. 
getPartitionsInShard(sid, migratedPartitions); for (PartitionId pid : expectedPartitions) { RepGroupId sidForPid = localTopo.getRepGroupId(pid); if (sid != sidForPid.getGroupId()) { newMigratedPartitions.add(pid.getPartitionId()); targetShards.add(sidForPid.getGroupId()); } } return true; }); if (!updated) { return; } if (!newMigratedPartitions.isEmpty()) { int resumePid = -1; if (ri.getPrimResumeKey(0) != null) { resumePid = rn.getPartitionId(ri.getPrimResumeKey(0)). getPartitionId(); } for (int i = 0; i < newMigratedPartitions.size(); ++i) { int pid2 = newMigratedPartitions.get(i); int sid2 = targetShards.get(i); ri.addVirtualScan(resumePid, pid2, sid2); } } } /** * Waits until that * (1) the topology T (could be either local or official) is updated to one * that is after the base topology; * (2) all partitions in T on this RN has the partition DB handle opened * and updated in PartitionManager; * (3) all partitions in T on this RN has the partition generation opened. */ private void waitServerStateUpdated( RuntimeControlBlock rcb, TopologyManager topoManager, PartitionManager partitionManager, Topology baseTopo) { RepNode rn = getRepNode(); Topology targetTopo = topoManager.callWithObjectLock(() -> { final Topology localTopo = topoManager.getLocalTopology(); return (localTopo.getSequenceNumber() >= baseTopo.getSequenceNumber() ? localTopo : baseTopo); }); long waitTime = (long)(rn.isMaster() ? 
rcb.getTimeoutMs() * WAIT_ELASTICITY_MASTER_PERCENT : rcb.getTimeoutMs() * WAIT_ELASTICITY_REPLICA_PERCENT); Topology updated = null; try { updated = partitionManager.awaitUpdate(targetTopo, waitTime); } catch (InterruptedException e) { throw new QueryStateException( "Interrupted while waiting for RN state updated."); } if (updated != null) { final Topology t = updated; getLogger().fine( "Server updated to topology (" + t.getSequenceNumber() + ", " + t.getLocalizationNumber() + ")"); return; } if (rn.isMaster()) { throw new QueryStateException( "Wait timeout at master " + "for server state updated past sequence number " + targetTopo.getSequenceNumber()); } throw new RNUnavailableException( "Wait timeout at replica " + "for server state updated past sequence number " + targetTopo.getSequenceNumber()); } /* * This method is called for virtual-shard scans only, before query execution * starts. It checks whether the partition that is supposed to have migrated * to this shard is indeed here. If the partition is missing, the method * waits for some time, and if it is still missing, it throws an exception. * * The partition may be missing either due to a delay in updating the * local topology or the PartitionManager, or due to migration failure. 
*/ private void ensureMigratedPartition(RuntimeControlBlock rcb) { RepNode rn = getRepNode(); TopologyManager topoManager = rn.getTopologyManager(); int sid = rn.getRepNodeId().getGroupId(); ResumeInfo ri = rcb.getResumeInfo(); PartitionId pid = new PartitionId(ri.getVirtualScanPid()); topoManager.addQuery(rcb); if (!checkPartitionIsHere(rcb, pid)) { if (rcb.getTraceLevel() >= 0) { rcb.trace("Partition " + pid + " was supposed to have " + "migrated to shard " + sid + " but is not found here"); } if (rn.isMaster()) { throw new QueryStateException( "Partition " + pid + " was supposed to have " + "migrated to shard " + sid + " but is not found here"); } throw new RNUnavailableException( "Partition " + pid + " was supposed to have " + "migrated to shard " + sid + " but is not found here", true); } } private boolean checkPartitionIsHere( RuntimeControlBlock rcb, PartitionId pid) { RepNode rn = getRepNode(); PartitionManager partManager = rn.getPartitionManager(); TopologyManager topoManager = rn.getTopologyManager(); PartitionGenerationTable partitionGenerationTable = rn.getPartGenTable(); int sid = rn.getRepNodeId().getGroupId(); long waitTime = 0; while (true) { final boolean done = topoManager.callWithObjectLock(() -> { Topology localTopo = topoManager.getLocalTopology(); RepGroupId sidForPid = localTopo.getRepGroupId(pid); if (sid == sidForPid.getGroupId()) { /* * Checks that the partition DB is open and the associated * generation is ready. We must check the DB first, then * the generation since isPartitionOpen always return true * if there is no migration ever. */ Database db = partManager.getPartitionDB(pid); if (db != null && db.getEnvironment() != null && db.getEnvironment().isValid() && partitionGenerationTable.isPartitionOpen(pid)) { return true; } if (rcb.getTraceLevel() >= 2) { rcb.trace("Partition " + pid.getPartitionId() + " not found in PartitionManager. 
" + "Putting query to sleep"); } } else if (rcb.getTraceLevel() >= 2) { rcb.trace("Partition " + pid.getPartitionId() + " not found in local topology. " + "Putting query to sleep"); } return false; }); if (done) { return true; } try { Thread.sleep(WAIT_MS_FOR_ELASTICITY); waitTime += WAIT_MS_FOR_ELASTICITY; if (rn.isMaster()) { if (waitTime > (rcb.getTimeoutMs() * WAIT_ELASTICITY_MASTER_PERCENT)) { break; } } else if (waitTime > (rcb.getTimeoutMs() * WAIT_ELASTICITY_REPLICA_PERCENT)) { break; } } catch (InterruptedException e) { } continue; } return false; } /* * Method called at the end of a batch to check if any partitions moved * out of this shard during the batch. If this is normal shard scan, the * method returns true, and as a result, the batch will be aborted and * then repeated. If this is a virtual shard scan, an exception is thrown * (this can happen if a query span more than one elasticity ops, which * is not supported by the query-elasticity algorithm) */ private boolean checkForMigratedPartitions( RuntimeControlBlock rcb, boolean isNormalScan) { int sid = getRepNode().getRepNodeId().getGroupId(); Topology baseTopo = rcb.getBaseTopo(); ArrayList<PartitionId> migratedPartitions = rcb.getMigratedPartitions(); if (isNormalScan) { for (PartitionId pid : migratedPartitions) { if (baseTopo.getRepGroupId(pid).getGroupId() == sid) { return true; } } } else { int pid2 = rcb.getResumeInfo().getVirtualScanPid(); for (PartitionId pid : migratedPartitions) { if (pid2 == pid.getPartitionId()) { QueryStateException qse = new QueryStateException( "Partition " + pid.getPartitionId() + " migrated again out of its target shard " + sid); getLogger().info(qse.toString()); qse.throwClientException(); } } } return false; } /* * Method called at the end of a batch to check if a partition that * moved out of this shard was returned to this shard during the batch, * because of a failure at the target shard. If so, an exception is thrown. 
* * The method is called for normal shard scans only. For virtual-shard * scans it is not needed: Before the scan starts, we check that partition * is here. So, for the partition to be restored, it must have moved out * first. But the move-out is also registered during the scan, and an * exception will be thrown after the end of the scan in this case ( * in method checkForMigratedPartitions() above). */ private void checkForRestoredPartitions(RuntimeControlBlock rcb) { int sid = getRepNode().getRepNodeId().getGroupId(); Topology baseTopo = rcb.getBaseTopo(); ArrayList<PartitionId> restoredPartitions = rcb.getRestoredPartitions(); if (restoredPartitions.isEmpty()) { return; } for (PartitionId pid : restoredPartitions) { if (baseTopo.getRepGroupId(pid).getGroupId() == sid) { QueryStateException qse = new QueryStateException( "Partition " + pid.getPartitionId() + "found back in shard " + sid + " after having moved away from it."); getLogger().info(qse.toString()); qse.throwClientException(); } } } }
apache/hadoop
36,219
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineReaderImpl.java
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.yarn.server.timelineservice.storage;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.timelineservice.ApplicationAttemptEntity;
import org.apache.hadoop.yarn.api.records.timelineservice.ContainerEntity;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntityType;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve;
import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters;
import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareFilter;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareOp;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineExistsFilter;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList.Operator;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValueFilter;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValuesFilter;
import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;
import org.apache.hadoop.yarn.util.timeline.TimelineUtils;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;

/**
 * Tests {@link FileSystemTimelineReaderImpl} by writing a small, hand-built
 * entity store (JSON entity files plus an app-to-flow mapping CSV) under
 * {@code target/} and then exercising {@code getEntity}/{@code getEntities}
 * with various field selections and filter lists.
 */
public class TestFileSystemTimelineReaderImpl {

  /** Root directory (under target/) holding the generated entity store. */
  private static final String ROOT_DIR = new File("target",
      TestFileSystemTimelineReaderImpl.class.getSimpleName()).getAbsolutePath();

  // Fixed timeline context shared by all entities written below.
  private static final String cluster = "cluster1";
  private static final String user = "user1";
  private static final String flowVersion = "v1";
  private static final String flowRunId = "1";

  /** Reader under test; re-created before each test by {@link #init()}. */
  private FileSystemTimelineReaderImpl reader;

  @BeforeAll
  public static void setup() throws Exception {
    initializeDataDirectory(ROOT_DIR);
  }

  /**
   * Populates {@code rootDir} with the entity files and the app-flow mapping
   * CSV that every test in this class reads.
   *
   * @param rootDir storage root to populate.
   * @throws Exception if any file cannot be written.
   */
  public static void initializeDataDirectory(String rootDir) throws Exception {
    loadEntityData(rootDir);
    // Create app flow mapping file.
    CSVFormat format =
        CSVFormat.Builder.create().setHeader("APP", "USER", "FLOW", "FLOWRUN").build();
    String appFlowMappingFile = rootDir + File.separator + "entities" +
        File.separator + "cluster1" + File.separator +
        FileSystemTimelineReaderImpl.APP_FLOW_MAPPING_FILE;
    try (PrintWriter out =
        new PrintWriter(new BufferedWriter(
            new FileWriter(appFlowMappingFile, true)));
        CSVPrinter printer = new CSVPrinter(out, format)) {
      printer.printRecord("app1", "user1", "flow1", 1);
      // Flow name deliberately contains a comma; see testAppFlowMappingCsv().
      printer.printRecord("app2", "user1", "flow1,flow", 1);
      // try-with-resources closes the printer (and the underlying writer);
      // no explicit close() needed.
    }
    (new File(rootDir)).deleteOnExit();
  }

  @AfterAll
  public static void tearDown() throws Exception {
    FileUtils.deleteDirectory(new File(ROOT_DIR));
  }

  @BeforeEach
  public void init() throws Exception {
    reader = new FileSystemTimelineReaderImpl();
    Configuration conf = new YarnConfiguration();
    conf.set(FileSystemTimelineReaderImpl.TIMELINE_SERVICE_STORAGE_DIR_ROOT,
        ROOT_DIR);
    reader.init(conf);
  }

  /**
   * Appends {@code entity} as one JSON line to {@code <dir>/<id>.thist},
   * creating the directory if needed. Appending lets two writes for the same
   * entity id (e.g. id_1) accumulate and be merged by the reader.
   *
   * @param entity entity to serialize.
   * @param dir    directory the entity file lives in.
   * @throws Exception if the directory or file cannot be created.
   */
  private static void writeEntityFile(TimelineEntity entity, File dir)
      throws Exception {
    if (!dir.exists()) {
      if (!dir.mkdirs()) {
        throw new IOException("Could not create directories for " + dir);
      }
    }
    String fileName = dir.getAbsolutePath() + File.separator + entity.getId() +
        ".thist";
    try (PrintWriter out =
        new PrintWriter(new BufferedWriter(new FileWriter(fileName, true)))) {
      out.println(TimelineUtils.dumpTimelineRecordtoJSON(entity));
      out.write("\n");
      // Closed by try-with-resources.
    }
  }

  /**
   * Writes the fixture entities: four "app" entities (id_1..id_4, with id_1
   * split across two files that the reader must merge), two application
   * attempts, three containers, and one entity under a flow name containing
   * a comma.
   *
   * @param rootDir storage root to write under.
   * @throws Exception if any entity file cannot be written.
   */
  private static void loadEntityData(String rootDir) throws Exception {
    File appDir = getAppDir(rootDir, "flow1", "app1", "app");

    // id_1, part 1: info, one event, metric1/metric2, config_1, relations.
    TimelineEntity entity11 = new TimelineEntity();
    entity11.setId("id_1");
    entity11.setType("app");
    entity11.setCreatedTime(1425016502000L);
    Map<String, Object> info1 = new HashMap<String, Object>();
    info1.put("info1", "val1");
    info1.put("info2", "val5");
    entity11.addInfo(info1);
    TimelineEvent event = new TimelineEvent();
    event.setId("event_1");
    event.setTimestamp(1425016502003L);
    entity11.addEvent(event);
    Set<TimelineMetric> metrics = new HashSet<TimelineMetric>();
    TimelineMetric metric1 = new TimelineMetric();
    metric1.setId("metric1");
    metric1.setType(TimelineMetric.Type.SINGLE_VALUE);
    metric1.addValue(1425016502006L, 113);
    metrics.add(metric1);
    TimelineMetric metric2 = new TimelineMetric();
    metric2.setId("metric2");
    metric2.setType(TimelineMetric.Type.TIME_SERIES);
    metric2.addValue(1425016502016L, 34);
    metrics.add(metric2);
    entity11.setMetrics(metrics);
    Map<String, String> configs = new HashMap<String, String>();
    configs.put("config_1", "127");
    entity11.setConfigs(configs);
    entity11.addRelatesToEntity("flow", "flow1");
    entity11.addIsRelatedToEntity("type1", "tid1_1");
    writeEntityFile(entity11, appDir);

    // id_1, part 2: appended to the same file so the reader merges it with
    // part 1 (extra configs/metrics/relations, one more event).
    TimelineEntity entity12 = new TimelineEntity();
    entity12.setId("id_1");
    entity12.setType("app");
    configs.clear();
    configs.put("config_2", "23");
    configs.put("config_3", "abc");
    entity12.addConfigs(configs);
    metrics.clear();
    TimelineMetric metric12 = new TimelineMetric();
    metric12.setId("metric2");
    metric12.setType(TimelineMetric.Type.TIME_SERIES);
    metric12.addValue(1425016502032L, 48);
    metric12.addValue(1425016502054L, 51);
    metrics.add(metric12);
    TimelineMetric metric3 = new TimelineMetric();
    metric3.setId("metric3");
    metric3.setType(TimelineMetric.Type.SINGLE_VALUE);
    metric3.addValue(1425016502060L, 23L);
    metrics.add(metric3);
    entity12.setMetrics(metrics);
    entity12.addIsRelatedToEntity("type1", "tid1_2");
    entity12.addIsRelatedToEntity("type2", "tid2_1`");
    TimelineEvent event15 = new TimelineEvent();
    event15.setId("event_5");
    event15.setTimestamp(1425016502017L);
    entity12.addEvent(event15);
    writeEntityFile(entity12, appDir);

    // id_2: distinct configs/metrics; relates to a different flow.
    TimelineEntity entity2 = new TimelineEntity();
    entity2.setId("id_2");
    entity2.setType("app");
    entity2.setCreatedTime(1425016501050L);
    Map<String, Object> info2 = new HashMap<String, Object>();
    // Fixed: previously this populated info1 by mistake, so id_2 was written
    // with an empty info map. No info filter in this class matches the
    // value 4, so the test assertions are unaffected.
    info2.put("info2", 4);
    entity2.addInfo(info2);
    Map<String, String> configs2 = new HashMap<String, String>();
    configs2.put("config_1", "129");
    configs2.put("config_3", "def");
    entity2.setConfigs(configs2);
    TimelineEvent event2 = new TimelineEvent();
    event2.setId("event_2");
    event2.setTimestamp(1425016501003L);
    entity2.addEvent(event2);
    Set<TimelineMetric> metrics2 = new HashSet<TimelineMetric>();
    TimelineMetric metric21 = new TimelineMetric();
    metric21.setId("metric1");
    metric21.setType(TimelineMetric.Type.SINGLE_VALUE);
    metric21.addValue(1425016501006L, 300);
    metrics2.add(metric21);
    TimelineMetric metric22 = new TimelineMetric();
    metric22.setId("metric2");
    metric22.setType(TimelineMetric.Type.TIME_SERIES);
    metric22.addValue(1425016501056L, 31);
    metric22.addValue(1425016501084L, 70);
    metrics2.add(metric22);
    TimelineMetric metric23 = new TimelineMetric();
    metric23.setId("metric3");
    metric23.setType(TimelineMetric.Type.SINGLE_VALUE);
    metric23.addValue(1425016502060L, 23L);
    metrics2.add(metric23);
    entity2.setMetrics(metrics2);
    entity2.addRelatesToEntity("flow", "flow2");
    writeEntityFile(entity2, appDir);

    // id_3: same created time as id_2; carries info2=3.5 and info4=20 used
    // by the info-filter tests, plus events event_2 and event_4.
    TimelineEntity entity3 = new TimelineEntity();
    entity3.setId("id_3");
    entity3.setType("app");
    entity3.setCreatedTime(1425016501050L);
    Map<String, Object> info3 = new HashMap<String, Object>();
    info3.put("info2", 3.5);
    info3.put("info4", 20);
    entity3.addInfo(info3);
    Map<String, String> configs3 = new HashMap<String, String>();
    configs3.put("config_1", "123");
    configs3.put("config_3", "abc");
    entity3.setConfigs(configs3);
    TimelineEvent event3 = new TimelineEvent();
    event3.setId("event_2");
    event3.setTimestamp(1425016501003L);
    entity3.addEvent(event3);
    TimelineEvent event4 = new TimelineEvent();
    event4.setId("event_4");
    event4.setTimestamp(1425016502006L);
    entity3.addEvent(event4);
    Set<TimelineMetric> metrics3 = new HashSet<TimelineMetric>();
    TimelineMetric metric31 = new TimelineMetric();
    metric31.setId("metric1");
    metric31.setType(TimelineMetric.Type.SINGLE_VALUE);
    metric31.addValue(1425016501006L, 124);
    metrics3.add(metric31);
    TimelineMetric metric32 = new TimelineMetric();
    metric32.setId("metric2");
    metric32.setType(TimelineMetric.Type.TIME_SERIES);
    metric32.addValue(1425016501056L, 31);
    metric32.addValue(1425016501084L, 74);
    metrics3.add(metric32);
    entity3.setMetrics(metrics3);
    entity3.addIsRelatedToEntity("type1", "tid1_2");
    writeEntityFile(entity3, appDir);

    // id_4: latest created time; bare entity with a single event.
    TimelineEntity entity4 = new TimelineEntity();
    entity4.setId("id_4");
    entity4.setType("app");
    entity4.setCreatedTime(1425016502050L);
    TimelineEvent event44 = new TimelineEvent();
    event44.setId("event_4");
    event44.setTimestamp(1425016502003L);
    entity4.addEvent(event44);
    writeEntityFile(entity4, appDir);

    // Two application attempts under the same app.
    File attemptDir = getAppDir(rootDir, "flow1", "app1",
        TimelineEntityType.YARN_APPLICATION_ATTEMPT.toString());
    ApplicationAttemptEntity attempt1 = new ApplicationAttemptEntity();
    attempt1.setId("app-attempt-1");
    attempt1.setCreatedTime(1425017502003L);
    writeEntityFile(attempt1, attemptDir);
    ApplicationAttemptEntity attempt2 = new ApplicationAttemptEntity();
    attempt2.setId("app-attempt-2");
    attempt2.setCreatedTime(1425017502004L);
    writeEntityFile(attempt2, attemptDir);

    // Three containers: one under attempt1, two under attempt2.
    File entityDir = getAppDir(rootDir, "flow1", "app1",
        TimelineEntityType.YARN_CONTAINER.toString());
    ContainerEntity containerEntity1 = new ContainerEntity();
    containerEntity1.setId("container_1_1");
    containerEntity1.setParent(attempt1.getIdentifier());
    containerEntity1.setCreatedTime(1425017502003L);
    writeEntityFile(containerEntity1, entityDir);
    ContainerEntity containerEntity2 = new ContainerEntity();
    containerEntity2.setId("container_2_1");
    containerEntity2.setParent(attempt2.getIdentifier());
    containerEntity2.setCreatedTime(1425018502003L);
    writeEntityFile(containerEntity2, entityDir);
    ContainerEntity containerEntity3 = new ContainerEntity();
    containerEntity3.setId("container_2_2");
    containerEntity3.setParent(attempt2.getIdentifier());
    containerEntity3.setCreatedTime(1425018502003L);
    writeEntityFile(containerEntity3, entityDir);

    // id_5 lives under a flow whose name contains a comma ("flow1,flow");
    // exercised by testAppFlowMappingCsv().
    File appDir2 = getAppDir(rootDir, "flow1,flow", "app2", "app");
    TimelineEntity entity5 = new TimelineEntity();
    entity5.setId("id_5");
    entity5.setType("app");
    entity5.setCreatedTime(1425016502050L);
    writeEntityFile(entity5, appDir2);
  }

  /**
   * Builds the on-disk directory for one entity type:
   * {@code <root>/entities/<cluster>/<user>/<flow>/<version>/<run>/<app>/<type>/}.
   */
  private static File getAppDir(String rootDir, String flowName,
      String appId, String entityName) {
    return new File(rootDir + File.separator + "entities" +
        File.separator + cluster + File.separator + user + File.separator +
        flowName + File.separator + flowVersion + File.separator + flowRunId +
        File.separator + appId + File.separator + entityName +
        File.separator);
  }

  @Test
  void testGetEntityDefaultView() throws Exception {
    // If no fields are specified, entity is returned with default view i.e.
    // only the id, type and created time.
    TimelineEntity result = reader.getEntity(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
            "app", "id_1"),
        new TimelineDataToRetrieve(null, null, null, null, null, null));
    assertEquals(
        (new TimelineEntity.Identifier("app", "id_1")).toString(),
        result.getIdentifier().toString());
    assertEquals((Long) 1425016502000L, result.getCreatedTime());
    assertEquals(0, result.getConfigs().size());
    assertEquals(0, result.getMetrics().size());
  }

  @Test
  void testGetEntityByClusterAndApp() throws Exception {
    // Cluster and AppId should be enough to get an entity.
    TimelineEntity result = reader.getEntity(
        new TimelineReaderContext("cluster1", null, null, null, "app1", "app",
            "id_1"),
        new TimelineDataToRetrieve(null, null, null, null, null, null));
    assertEquals(
        (new TimelineEntity.Identifier("app", "id_1")).toString(),
        result.getIdentifier().toString());
    assertEquals((Long) 1425016502000L, result.getCreatedTime());
    assertEquals(0, result.getConfigs().size());
    assertEquals(0, result.getMetrics().size());
  }

  /** This test checks whether we can handle commas in app flow mapping csv. */
  @Test
  void testAppFlowMappingCsv() throws Exception {
    // Test getting an entity by cluster and app where flow entry
    // in app flow mapping csv has commas.
    TimelineEntity result = reader.getEntity(
        new TimelineReaderContext("cluster1", null, null, null, "app2",
            "app", "id_5"),
        new TimelineDataToRetrieve(null, null, null, null, null, null));
    assertEquals(
        (new TimelineEntity.Identifier("app", "id_5")).toString(),
        result.getIdentifier().toString());
    assertEquals((Long) 1425016502050L, result.getCreatedTime());
  }

  @Test
  void testGetEntityCustomFields() throws Exception {
    // Specified fields in addition to default view will be returned.
    TimelineEntity result = reader.getEntity(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L,
            "app1", "app", "id_1"),
        new TimelineDataToRetrieve(null, null,
            EnumSet.of(Field.INFO, Field.CONFIGS, Field.METRICS), null, null,
            null));
    assertEquals(
        (new TimelineEntity.Identifier("app", "id_1")).toString(),
        result.getIdentifier().toString());
    assertEquals((Long) 1425016502000L, result.getCreatedTime());
    assertEquals(3, result.getConfigs().size());
    assertEquals(3, result.getMetrics().size());
    assertEquals(2, result.getInfo().size());
    // No events will be returned
    assertEquals(0, result.getEvents().size());
  }

  @Test
  void testGetEntityAllFields() throws Exception {
    // All fields of TimelineEntity will be returned.
    TimelineEntity result = reader.getEntity(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L,
            "app1", "app", "id_1"),
        new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null,
            null, null));
    assertEquals(
        (new TimelineEntity.Identifier("app", "id_1")).toString(),
        result.getIdentifier().toString());
    assertEquals((Long) 1425016502000L, result.getCreatedTime());
    assertEquals(3, result.getConfigs().size());
    assertEquals(3, result.getMetrics().size());
    // All fields including events will be returned.
    assertEquals(2, result.getEvents().size());
  }

  @Test
  void testGetAllEntities() throws Exception {
    Set<TimelineEntity> result = reader.getEntities(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
            "app", null),
        new TimelineEntityFilters.Builder().build(),
        new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null,
            null, null));
    // All 4 entities will be returned
    assertEquals(4, result.size());
  }

  @Test
  void testGetEntitiesWithLimit() throws Exception {
    Set<TimelineEntity> result = reader.getEntities(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L,
            "app1", "app", null),
        new TimelineEntityFilters.Builder().entityLimit(2L).build(),
        new TimelineDataToRetrieve());
    assertEquals(2, result.size());
    // Needs to be rewritten once hashcode and equals for
    // TimelineEntity is implemented
    // Entities with id_1 and id_4 should be returned,
    // based on created time, descending.
    for (TimelineEntity entity : result) {
      if (!entity.getId().equals("id_1") && !entity.getId().equals("id_4")) {
        fail("Entity not sorted by created time");
      }
    }
    result = reader.getEntities(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L,
            "app1", "app", null),
        new TimelineEntityFilters.Builder().entityLimit(3L).build(),
        new TimelineDataToRetrieve());
    // Even though 2 entities out of 4 have same created time, one entity
    // is left out due to limit
    assertEquals(3, result.size());
  }

  @Test
  void testGetEntitiesByTimeWindows() throws Exception {
    // Get entities based on created time start and end time range.
    Set<TimelineEntity> result = reader.getEntities(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L,
            "app1", "app", null),
        new TimelineEntityFilters.Builder().createdTimeBegin(1425016502030L)
            .createTimeEnd(1425016502060L).build(),
        new TimelineDataToRetrieve());
    assertEquals(1, result.size());
    // Only one entity with ID id_4 should be returned.
    for (TimelineEntity entity : result) {
      if (!entity.getId().equals("id_4")) {
        fail("Incorrect filtering based on created time range");
      }
    }

    // Get entities if only created time end is specified.
    result = reader.getEntities(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L,
            "app1", "app", null),
        new TimelineEntityFilters.Builder().createTimeEnd(1425016502010L)
            .build(),
        new TimelineDataToRetrieve());
    assertEquals(3, result.size());
    for (TimelineEntity entity : result) {
      if (entity.getId().equals("id_4")) {
        fail("Incorrect filtering based on created time range");
      }
    }

    // Get entities if only created time start is specified.
    result = reader.getEntities(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L,
            "app1", "app", null),
        new TimelineEntityFilters.Builder().createdTimeBegin(1425016502010L)
            .build(),
        new TimelineDataToRetrieve());
    assertEquals(1, result.size());
    for (TimelineEntity entity : result) {
      if (!entity.getId().equals("id_4")) {
        fail("Incorrect filtering based on created time range");
      }
    }
  }

  @Test
  void testGetFilteredEntities() throws Exception {
    // Get entities based on info filters.
    TimelineFilterList infoFilterList = new TimelineFilterList();
    infoFilterList.addFilter(
        new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "info2", 3.5));
    Set<TimelineEntity> result = reader.getEntities(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L,
            "app1", "app", null),
        new TimelineEntityFilters.Builder().infoFilters(infoFilterList).build(),
        new TimelineDataToRetrieve());
    assertEquals(1, result.size());
    // Only one entity with ID id_3 should be returned.
    for (TimelineEntity entity : result) {
      if (!entity.getId().equals("id_3")) {
        fail("Incorrect filtering based on info filters");
      }
    }

    // Get entities based on config filters.
    TimelineFilterList confFilterList = new TimelineFilterList();
    confFilterList.addFilter(
        new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_1", "123"));
    confFilterList.addFilter(
        new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_3", "abc"));
    result = reader.getEntities(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L,
            "app1", "app", null),
        new TimelineEntityFilters.Builder().configFilters(confFilterList)
            .build(),
        new TimelineDataToRetrieve());
    assertEquals(1, result.size());
    for (TimelineEntity entity : result) {
      if (!entity.getId().equals("id_3")) {
        fail("Incorrect filtering based on config filters");
      }
    }

    // Get entities based on event filters.
    TimelineFilterList eventFilters = new TimelineFilterList();
    eventFilters.addFilter(
        new TimelineExistsFilter(TimelineCompareOp.EQUAL, "event_2"));
    eventFilters.addFilter(
        new TimelineExistsFilter(TimelineCompareOp.EQUAL, "event_4"));
    result = reader.getEntities(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L,
            "app1", "app", null),
        new TimelineEntityFilters.Builder().eventFilters(eventFilters).build(),
        new TimelineDataToRetrieve());
    assertEquals(1, result.size());
    for (TimelineEntity entity : result) {
      if (!entity.getId().equals("id_3")) {
        fail("Incorrect filtering based on event filters");
      }
    }

    // Get entities based on metric filters.
    TimelineFilterList metricFilterList = new TimelineFilterList();
    metricFilterList.addFilter(new TimelineCompareFilter(
        TimelineCompareOp.GREATER_OR_EQUAL, "metric3", 0L));
    result = reader.getEntities(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L,
            "app1", "app", null),
        new TimelineEntityFilters.Builder().metricFilters(metricFilterList)
            .build(),
        new TimelineDataToRetrieve());
    assertEquals(2, result.size());
    // Two entities with IDs' id_1 and id_2 should be returned.
    for (TimelineEntity entity : result) {
      if (!entity.getId().equals("id_1") && !entity.getId().equals("id_2")) {
        fail("Incorrect filtering based on metric filters");
      }
    }

    // Get entities based on complex config filters.
    TimelineFilterList list1 = new TimelineFilterList();
    list1.addFilter(
        new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_1", "129"));
    list1.addFilter(
        new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_3", "def"));
    TimelineFilterList list2 = new TimelineFilterList();
    list2.addFilter(
        new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_2", "23"));
    list2.addFilter(
        new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_3", "abc"));
    TimelineFilterList confFilterList1 =
        new TimelineFilterList(Operator.OR, list1, list2);
    result = reader.getEntities(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L,
            "app1", "app", null),
        new TimelineEntityFilters.Builder().configFilters(confFilterList1)
            .build(),
        new TimelineDataToRetrieve());
    assertEquals(2, result.size());
    for (TimelineEntity entity : result) {
      if (!entity.getId().equals("id_1") && !entity.getId().equals("id_2")) {
        fail("Incorrect filtering based on config filters");
      }
    }

    TimelineFilterList list3 = new TimelineFilterList();
    list3.addFilter(new TimelineKeyValueFilter(
        TimelineCompareOp.NOT_EQUAL, "config_1", "123"));
    list3.addFilter(new TimelineKeyValueFilter(
        TimelineCompareOp.NOT_EQUAL, "config_3", "abc"));
    TimelineFilterList list4 = new TimelineFilterList();
    list4.addFilter(
        new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "config_2", "23"));
    TimelineFilterList confFilterList2 =
        new TimelineFilterList(Operator.OR, list3, list4);
    result = reader.getEntities(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L,
            "app1", "app", null),
        new TimelineEntityFilters.Builder().configFilters(confFilterList2)
            .build(),
        new TimelineDataToRetrieve());
    assertEquals(2, result.size());
    for (TimelineEntity entity : result) {
      if (!entity.getId().equals("id_1") && !entity.getId().equals("id_2")) {
        fail("Incorrect filtering based on config filters");
      }
    }

    TimelineFilterList confFilterList3 = new TimelineFilterList();
    confFilterList3.addFilter(new TimelineKeyValueFilter(
        TimelineCompareOp.NOT_EQUAL, "config_1", "127"));
    confFilterList3.addFilter(new TimelineKeyValueFilter(
        TimelineCompareOp.NOT_EQUAL, "config_3", "abc"));
    result = reader.getEntities(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L,
            "app1", "app", null),
        new TimelineEntityFilters.Builder().configFilters(confFilterList3)
            .build(),
        new TimelineDataToRetrieve());
    assertEquals(1, result.size());
    for (TimelineEntity entity : result) {
      if (!entity.getId().equals("id_2")) {
        fail("Incorrect filtering based on config filters");
      }
    }

    TimelineFilterList confFilterList4 = new TimelineFilterList();
    confFilterList4.addFilter(new TimelineKeyValueFilter(
        TimelineCompareOp.EQUAL, "config_dummy", "dummy"));
    confFilterList4.addFilter(new TimelineKeyValueFilter(
        TimelineCompareOp.EQUAL, "config_3", "def"));
    result = reader.getEntities(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L,
            "app1", "app", null),
        new TimelineEntityFilters.Builder().configFilters(confFilterList4)
            .build(),
        new TimelineDataToRetrieve());
    assertEquals(0, result.size());

    TimelineFilterList confFilterList5 = new TimelineFilterList(Operator.OR);
    confFilterList5.addFilter(new TimelineKeyValueFilter(
        TimelineCompareOp.EQUAL, "config_dummy", "dummy"));
    confFilterList5.addFilter(new TimelineKeyValueFilter(
        TimelineCompareOp.EQUAL, "config_3", "def"));
    result = reader.getEntities(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L,
            "app1", "app", null),
        new TimelineEntityFilters.Builder().configFilters(confFilterList5)
            .build(),
        new TimelineDataToRetrieve());
    assertEquals(1, result.size());
    for (TimelineEntity entity : result) {
      if (!entity.getId().equals("id_2")) {
        fail("Incorrect filtering based on config filters");
      }
    }

    // Get entities based on complex metric filters.
    TimelineFilterList list6 = new TimelineFilterList();
    list6.addFilter(new TimelineCompareFilter(
        TimelineCompareOp.GREATER_THAN, "metric1", 200));
    list6.addFilter(new TimelineCompareFilter(
        TimelineCompareOp.EQUAL, "metric3", 23));
    TimelineFilterList list7 = new TimelineFilterList();
    list7.addFilter(new TimelineCompareFilter(
        TimelineCompareOp.GREATER_OR_EQUAL, "metric2", 74));
    TimelineFilterList metricFilterList1 =
        new TimelineFilterList(Operator.OR, list6, list7);
    result = reader.getEntities(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L,
            "app1", "app", null),
        new TimelineEntityFilters.Builder().metricFilters(metricFilterList1)
            .build(),
        new TimelineDataToRetrieve());
    assertEquals(2, result.size());
    // Two entities with IDs' id_2 and id_3 should be returned.
    for (TimelineEntity entity : result) {
      if (!entity.getId().equals("id_2") && !entity.getId().equals("id_3")) {
        fail("Incorrect filtering based on metric filters");
      }
    }

    TimelineFilterList metricFilterList2 = new TimelineFilterList();
    metricFilterList2.addFilter(new TimelineCompareFilter(
        TimelineCompareOp.LESS_THAN, "metric2", 70));
    metricFilterList2.addFilter(new TimelineCompareFilter(
        TimelineCompareOp.LESS_OR_EQUAL, "metric3", 23));
    result = reader.getEntities(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L,
            "app1", "app", null),
        new TimelineEntityFilters.Builder().metricFilters(metricFilterList2)
            .build(),
        new TimelineDataToRetrieve());
    assertEquals(1, result.size());
    for (TimelineEntity entity : result) {
      if (!entity.getId().equals("id_1")) {
        fail("Incorrect filtering based on metric filters");
      }
    }

    TimelineFilterList metricFilterList3 = new TimelineFilterList();
    metricFilterList3.addFilter(new TimelineCompareFilter(
        TimelineCompareOp.LESS_THAN, "dummy_metric", 30));
    metricFilterList3.addFilter(new TimelineCompareFilter(
        TimelineCompareOp.LESS_OR_EQUAL, "metric3", 23));
    result = reader.getEntities(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L,
            "app1", "app", null),
        new TimelineEntityFilters.Builder().metricFilters(metricFilterList3)
            .build(),
        new TimelineDataToRetrieve());
    assertEquals(0, result.size());

    TimelineFilterList metricFilterList4 = new TimelineFilterList(Operator.OR);
    metricFilterList4.addFilter(new TimelineCompareFilter(
        TimelineCompareOp.LESS_THAN, "dummy_metric", 30));
    metricFilterList4.addFilter(new TimelineCompareFilter(
        TimelineCompareOp.LESS_OR_EQUAL, "metric3", 23));
    result = reader.getEntities(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L,
            "app1", "app", null),
        new TimelineEntityFilters.Builder().metricFilters(metricFilterList4)
            .build(),
        new TimelineDataToRetrieve());
    assertEquals(2, result.size());
    for (TimelineEntity entity : result) {
      if (!entity.getId().equals("id_1") && !entity.getId().equals("id_2")) {
        fail("Incorrect filtering based on metric filters");
      }
    }

    TimelineFilterList metricFilterList5 =
        new TimelineFilterList(new TimelineCompareFilter(
            TimelineCompareOp.NOT_EQUAL, "metric2", 74));
    result = reader.getEntities(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L,
            "app1", "app", null),
        new TimelineEntityFilters.Builder().metricFilters(metricFilterList5)
            .build(),
        new TimelineDataToRetrieve());
    assertEquals(2, result.size());
    for (TimelineEntity entity : result) {
      if (!entity.getId().equals("id_1") && !entity.getId().equals("id_2")) {
        fail("Incorrect filtering based on metric filters");
      }
    }

    TimelineFilterList infoFilterList1 = new TimelineFilterList();
    infoFilterList1.addFilter(
        new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "info2", 3.5));
    infoFilterList1.addFilter(
        new TimelineKeyValueFilter(TimelineCompareOp.NOT_EQUAL, "info4", 20));
    result = reader.getEntities(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L,
            "app1", "app", null),
        new TimelineEntityFilters.Builder().infoFilters(infoFilterList1)
            .build(),
        new TimelineDataToRetrieve());
    assertEquals(0, result.size());

    TimelineFilterList infoFilterList2 = new TimelineFilterList(Operator.OR);
    infoFilterList2.addFilter(
        new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "info2", 3.5));
    infoFilterList2.addFilter(
        new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "info1", "val1"));
    result = reader.getEntities(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L,
            "app1", "app", null),
        new TimelineEntityFilters.Builder().infoFilters(infoFilterList2)
            .build(),
        new TimelineDataToRetrieve());
    assertEquals(2, result.size());
    for (TimelineEntity entity : result) {
      if (!entity.getId().equals("id_1") && !entity.getId().equals("id_3")) {
        fail("Incorrect filtering based on info filters");
      }
    }

    TimelineFilterList infoFilterList3 = new TimelineFilterList();
    infoFilterList3.addFilter(
        new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "dummy_info", 1));
    infoFilterList3.addFilter(
        new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "info2", "val5"));
    result = reader.getEntities(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L,
            "app1", "app", null),
        new TimelineEntityFilters.Builder().infoFilters(infoFilterList3)
            .build(),
        new TimelineDataToRetrieve());
    assertEquals(0, result.size());

    TimelineFilterList infoFilterList4 = new TimelineFilterList(Operator.OR);
    infoFilterList4.addFilter(
        new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "dummy_info", 1));
    infoFilterList4.addFilter(
        new TimelineKeyValueFilter(TimelineCompareOp.EQUAL, "info2", "val5"));
    result = reader.getEntities(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L,
            "app1", "app", null),
        new TimelineEntityFilters.Builder().infoFilters(infoFilterList4)
            .build(),
        new TimelineDataToRetrieve());
    assertEquals(1, result.size());
    for (TimelineEntity entity : result) {
      if (!entity.getId().equals("id_1")) {
        fail("Incorrect filtering based on info filters");
      }
    }
  }

  @Test
  void testGetEntitiesByRelations() throws Exception {
    // Get entities based on relatesTo.
    TimelineFilterList relatesTo = new TimelineFilterList(Operator.OR);
    Set<Object> relatesToIds =
        new HashSet<Object>(Arrays.asList((Object) "flow1"));
    relatesTo.addFilter(new TimelineKeyValuesFilter(
        TimelineCompareOp.EQUAL, "flow", relatesToIds));
    Set<TimelineEntity> result = reader.getEntities(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L,
            "app1", "app", null),
        new TimelineEntityFilters.Builder().relatesTo(relatesTo).build(),
        new TimelineDataToRetrieve());
    assertEquals(1, result.size());
    // Only one entity with ID id_1 should be returned.
    for (TimelineEntity entity : result) {
      if (!entity.getId().equals("id_1")) {
        fail("Incorrect filtering based on relatesTo");
      }
    }

    // Get entities based on isRelatedTo.
    TimelineFilterList isRelatedTo = new TimelineFilterList(Operator.OR);
    Set<Object> isRelatedToIds =
        new HashSet<Object>(Arrays.asList((Object) "tid1_2"));
    isRelatedTo.addFilter(new TimelineKeyValuesFilter(
        TimelineCompareOp.EQUAL, "type1", isRelatedToIds));
    result = reader.getEntities(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L,
            "app1", "app", null),
        new TimelineEntityFilters.Builder().isRelatedTo(isRelatedTo).build(),
        new TimelineDataToRetrieve());
    assertEquals(2, result.size());
    // Two entities with IDs' id_1 and id_3 should be returned.
    for (TimelineEntity entity : result) {
      if (!entity.getId().equals("id_1") && !entity.getId().equals("id_3")) {
        fail("Incorrect filtering based on isRelatedTo");
      }
    }
  }
}
apache/juneau
35,176
juneau-core/juneau-marshall/src/main/java/org/apache/juneau/objecttools/ObjectRest.java
// *************************************************************************************************************************** // * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file * // * distributed with this work for additional information regarding copyright ownership. The ASF licenses this file * // * to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance * // * with the License. You may obtain a copy of the License at * // * * // * http://www.apache.org/licenses/LICENSE-2.0 * // * * // * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an * // * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * // * specific language governing permissions and limitations under the License. * // *************************************************************************************************************************** package org.apache.juneau.objecttools; import static java.net.HttpURLConnection.*; import static org.apache.juneau.common.utils.Utils.*; import java.io.*; import java.lang.reflect.*; import java.util.*; import org.apache.juneau.*; import org.apache.juneau.collections.*; import org.apache.juneau.json.*; import org.apache.juneau.parser.*; /** * POJO REST API. * * <p> * Provides the ability to perform standard REST operations (GET, PUT, POST, DELETE) against nodes in a POJO model. * Nodes in the POJO model are addressed using URLs. * * <p> * A POJO model is defined as a tree model where nodes consist of consisting of the following: * <ul class='spaced-list'> * <li> * {@link Map Maps} and Java beans representing JSON objects. * <li> * {@link Collection Collections} and arrays representing JSON arrays. * <li> * Java beans. * </ul> * * <p> * Leaves of the tree can be any type of object. 
* * <p> * Use {@link #get(String) get()} to retrieve an element from a JSON tree. * <br>Use {@link #put(String,Object) put()} to create (or overwrite) an element in a JSON tree. * <br>Use {@link #post(String,Object) post()} to add an element to a list in a JSON tree. * <br>Use {@link #delete(String) delete()} to remove an element from a JSON tree. * * <p> * Leading slashes in URLs are ignored. * So <js>"/xxx/yyy/zzz"</js> and <js>"xxx/yyy/zzz"</js> are considered identical. * * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jc>// Construct an unstructured POJO model</jc> * JsonMap <jv>map</jv> = JsonMap.<jsm>ofJson</jsm>(<js>""</js> * + <js>"{"</js> * + <js>" name:'John Smith', "</js> * + <js>" address:{ "</js> * + <js>" streetAddress:'21 2nd Street', "</js> * + <js>" city:'New York', "</js> * + <js>" state:'NY', "</js> * + <js>" postalCode:10021 "</js> * + <js>" }, "</js> * + <js>" phoneNumbers:[ "</js> * + <js>" '212 555-1111', "</js> * + <js>" '212 555-2222' "</js> * + <js>" ], "</js> * + <js>" additionalInfo:null, "</js> * + <js>" remote:false, "</js> * + <js>" height:62.4, "</js> * + <js>" 'fico score':' &gt; 640' "</js> * + <js>"} "</js> * ); * * <jc>// Wrap Map inside an ObjectRest object</jc> * ObjectRest <jv>johnSmith</jv> = ObjectRest.<jsm>create</jsm>(<jv>map</jv>); * * <jc>// Get a simple value at the top level</jc> * <jc>// "John Smith"</jc> * String <jv>name</jv> = <jv>johnSmith</jv>.getString(<js>"name"</js>); * * <jc>// Change a simple value at the top level</jc> * <jv>johnSmith</jv>.put(<js>"name"</js>, <js>"The late John Smith"</js>); * * <jc>// Get a simple value at a deep level</jc> * <jc>// "21 2nd Street"</jc> * String <jv>streetAddress</jv> = <jv>johnSmith</jv>.getString(<js>"address/streetAddress"</js>); * * <jc>// Set a simple value at a deep level</jc> * <jv>johnSmith</jv>.put(<js>"address/streetAddress"</js>, <js>"101 Cemetery Way"</js>); * * <jc>// Get entries in a list</jc> * <jc>// "212 555-1111"</jc> * String 
<jv>firstPhoneNumber</jv> = <jv>johnSmith</jv>.getString(<js>"phoneNumbers/0"</js>); * * <jc>// Add entries to a list</jc> * <jv>johnSmith</jv>.post(<js>"phoneNumbers"</js>, <js>"212 555-3333"</js>); * * <jc>// Delete entries from a model</jc> * <jv>johnSmith</jv>.delete(<js>"fico score"</js>); * * <jc>// Add entirely new structures to the tree</jc> * JsonMap <jv>medicalInfo</jv> = JsonMap.<jsm>ofJson</jsm>(<js>""</js> * + <js>"{"</js> * + <js>" currentStatus: 'deceased',"</js> * + <js>" health: 'non-existent',"</js> * + <js>" creditWorthiness: 'not good'"</js> * + <js>"}"</js> * ); * <jv>johnSmith</jv>.put(<js>"additionalInfo/medicalInfo"</js>, <jv>medicalInfo</jv>); * </p> * * <p> * In the special case of collections/arrays of maps/beans, a special XPath-like selector notation can be used in lieu * of index numbers on GET requests to return a map/bean with a specified attribute value. * <br>The syntax is {@code @attr=val}, where attr is the attribute name on the child map, and val is the matching value. * * <h5 class='section'>Example:</h5> * <p class='bjava'> * <jc>// Get map/bean with name attribute value of 'foo' from a list of items</jc> * Map <jv>map</jv> = <jv>objectRest</jv>.getMap(<js>"/items/@name=foo"</js>); * </p> * * <h5 class='section'>See Also:</h5><ul> * </ul> */ @SuppressWarnings({"unchecked","rawtypes"}) public class ObjectRest { //----------------------------------------------------------------------------------------------------------------- // Static //----------------------------------------------------------------------------------------------------------------- /** The list of possible request types. */ private static final int GET=1, PUT=2, POST=3, DELETE=4; /** * Static creator. * @param o The object being wrapped. * @return A new {@link ObjectRest} object. */ public static ObjectRest create(Object o) { return new ObjectRest(o); } /** * Static creator. * @param o The object being wrapped. 
* @param parser The parser to use for parsing arguments and converting objects to the correct data type. * @return A new {@link ObjectRest} object. */ public static ObjectRest create(Object o, ReaderParser parser) { return new ObjectRest(o, parser); } //----------------------------------------------------------------------------------------------------------------- // Instance //----------------------------------------------------------------------------------------------------------------- private ReaderParser parser = JsonParser.DEFAULT; final BeanSession session; /** If true, the root cannot be overwritten */ private boolean rootLocked; /** The root of the model. */ private JsonNode root; /** * Create a new instance of a REST interface over the specified object. * * <p> * Uses {@link BeanContext#DEFAULT} for working with Java beans. * * @param o The object to be wrapped. */ public ObjectRest(Object o) { this(o, null); } /** * Create a new instance of a REST interface over the specified object. * * <p> * The parser is used as the bean context. * * @param o The object to be wrapped. * @param parser The parser to use for parsing arguments and converting objects to the correct data type. */ public ObjectRest(Object o, ReaderParser parser) { this.session = parser == null ? BeanContext.DEFAULT_SESSION : parser.getBeanContext().getSession(); if (parser == null) parser = JsonParser.DEFAULT; this.parser = parser; this.root = new JsonNode(null, null, o, session.object()); } /** * Call this method to prevent the root object from being overwritten on <c>put("", xxx);</c> calls. * * @return This object. */ public ObjectRest setRootLocked() { this.rootLocked = true; return this; } /** * The root object that was passed into the constructor of this method. * * @return The root object. */ public Object getRootObject() { return root.o; } /** * Retrieves the element addressed by the URL. * * @param url * The URL of the element to retrieve. 
* <br>If <jk>null</jk> or blank, returns the root. * @return The addressed element, or <jk>null</jk> if that element does not exist in the tree. */ public Object get(String url) { return getWithDefault(url, null); } /** * Retrieves the element addressed by the URL. * * @param url * The URL of the element to retrieve. * <br>If <jk>null</jk> or blank, returns the root. * @param defVal The default value if the map doesn't contain the specified mapping. * @return The addressed element, or null if that element does not exist in the tree. */ public Object getWithDefault(String url, Object defVal) { Object o = service(GET, url, null); return o == null ? defVal : o; } /** * Retrieves the element addressed by the URL as the specified object type. * * <p> * Will convert object to the specified type per {@link BeanSession#convertToType(Object, Class)}. * * <h5 class='section'>Examples:</h5> * <p class='bjava'> * ObjectRest <jv>objectRest</jv> = <jk>new</jk> ObjectRest(<jv>object</jv>); * * <jc>// Value converted to a string.</jc> * String <jv>string</jv> = <jv>objectRest</jv>.get(<js>"path/to/string"</js>, String.<jk>class</jk>); * * <jc>// Value converted to a bean.</jc> * MyBean <jv>bean</jv> = <jv>objectRest</jv>.get(<js>"path/to/bean"</js>, MyBean.<jk>class</jk>); * * <jc>// Value converted to a bean array.</jc> * MyBean[] <jv>beanArray</jv> = <jv>objectRest</jv>.get(<js>"path/to/beanarray"</js>, MyBean[].<jk>class</jk>); * * <jc>// Value converted to a linked-list of objects.</jc> * List <jv>list</jv> = <jv>objectRest</jv>.get(<js>"path/to/list"</js>, LinkedList.<jk>class</jk>); * * <jc>// Value converted to a map of object keys/values.</jc> * Map <jv>map</jv> = <jv>objectRest</jv>.get(<js>"path/to/map"</js>, TreeMap.<jk>class</jk>); * </p> * * @param url * The URL of the element to retrieve. * If <jk>null</jk> or blank, returns the root. * @param type The specified object type. * * @param <T> The specified object type. 
* @return The addressed element, or null if that element does not exist in the tree. */ public <T> T get(String url, Class<T> type) { return getWithDefault(url, null, type); } /** * Retrieves the element addressed by the URL as the specified object type. * * <p> * Will convert object to the specified type per {@link BeanSession#convertToType(Object, Class)}. * * <p> * The type can be a simple type (e.g. beans, strings, numbers) or parameterized type (collections/maps). * * <h5 class='section'>Examples:</h5> * <p class='bjava'> * ObjectRest <jv>objectRest</jv> = <jk>new</jk> ObjectRest(<jv>object</jv>); * * <jc>// Value converted to a linked-list of strings.</jc> * List&lt;String&gt; <jv>list1</jv> = <jv>objectRest</jv>.get(<js>"path/to/list1"</js>, LinkedList.<jk>class</jk>, String.<jk>class</jk>); * * <jc>// Value converted to a linked-list of beans.</jc> * List&lt;MyBean&gt; <jv>list2</jv> = <jv>objectRest</jv>.get(<js>"path/to/list2"</js>, LinkedList.<jk>class</jk>, MyBean.<jk>class</jk>); * * <jc>// Value converted to a linked-list of linked-lists of strings.</jc> * List&lt;List&lt;String&gt;&gt; <jv>list3</jv> = <jv>objectRest</jv>.get(<js>"path/to/list3"</js>, LinkedList.<jk>class</jk>, LinkedList.<jk>class</jk>, String.<jk>class</jk>); * * <jc>// Value converted to a map of string keys/values.</jc> * Map&lt;String,String&gt; <jv>map1</jv> = <jv>objectRest</jv>.get(<js>"path/to/map1"</js>, TreeMap.<jk>class</jk>, String.<jk>class</jk>, String.<jk>class</jk>); * * <jc>// Value converted to a map containing string keys and values of lists containing beans.</jc> * Map&lt;String,List&lt;MyBean&gt;&gt; <jv>map2</jv> = <jv>objectRest</jv>.get(<js>"path/to/map2"</js>, TreeMap.<jk>class</jk>, String.<jk>class</jk>, List.<jk>class</jk>, MyBean.<jk>class</jk>); * </p> * * <p> * <c>Collection</c> classes are assumed to be followed by zero or one objects indicating the element type. 
* * <p> * <c>Map</c> classes are assumed to be followed by zero or two meta objects indicating the key and value types. * * <p> * The array can be arbitrarily long to indicate arbitrarily complex data structures. * * <h5 class='section'>Notes:</h5><ul> * <li class='note'> * Use the {@link #get(String, Class)} method instead if you don't need a parameterized map/collection. * </ul> * * @param url * The URL of the element to retrieve. * If <jk>null</jk> or blank, returns the root. * @param type The specified object type. * @param args The specified object parameter types. * * @param <T> The specified object type. * @return The addressed element, or null if that element does not exist in the tree. */ public <T> T get(String url, Type type, Type...args) { return getWithDefault(url, null, type, args); } /** * Same as {@link #get(String, Class)} but returns a default value if the addressed element is null or non-existent. * * @param url * The URL of the element to retrieve. * If <jk>null</jk> or blank, returns the root. * @param def The default value if addressed item does not exist. * @param type The specified object type. * * @param <T> The specified object type. * @return The addressed element, or null if that element does not exist in the tree. */ public <T> T getWithDefault(String url, T def, Class<T> type) { Object o = service(GET, url, null); if (o == null) return def; return session.convertToType(o, type); } /** * Same as {@link #get(String,Type,Type[])} but returns a default value if the addressed element is null or non-existent. * * @param url * The URL of the element to retrieve. * If <jk>null</jk> or blank, returns the root. * @param def The default value if addressed item does not exist. * @param type The specified object type. * @param args The specified object parameter types. * * @param <T> The specified object type. * @return The addressed element, or null if that element does not exist in the tree. 
*/ public <T> T getWithDefault(String url, T def, Type type, Type...args) { Object o = service(GET, url, null); if (o == null) return def; return session.convertToType(o, type, args); } /** * Returns the specified entry value converted to a {@link String}. * * <p> * Shortcut for <code>get(String.<jk>class</jk>, key)</code>. * * @param url The key. * @return The converted value, or <jk>null</jk> if the map contains no mapping for this key. */ public String getString(String url) { return get(url, String.class); } /** * Returns the specified entry value converted to a {@link String}. * * <p> * Shortcut for <code>get(String.<jk>class</jk>, key, defVal)</code>. * * @param url The key. * @param defVal The default value if the map doesn't contain the specified mapping. * @return The converted value, or the default value if the map contains no mapping for this key. */ public String getString(String url, String defVal) { return getWithDefault(url, defVal, String.class); } /** * Returns the specified entry value converted to an {@link Integer}. * * <p> * Shortcut for <code>get(Integer.<jk>class</jk>, key)</code>. * * @param url The key. * @return The converted value, or <jk>null</jk> if the map contains no mapping for this key. * @throws InvalidDataConversionException If value cannot be converted. */ public Integer getInt(String url) { return get(url, Integer.class); } /** * Returns the specified entry value converted to an {@link Integer}. * * <p> * Shortcut for <code>get(Integer.<jk>class</jk>, key, defVal)</code>. * * @param url The key. * @param defVal The default value if the map doesn't contain the specified mapping. * @return The converted value, or the default value if the map contains no mapping for this key. * @throws InvalidDataConversionException If value cannot be converted. */ public Integer getInt(String url, Integer defVal) { return getWithDefault(url, defVal, Integer.class); } /** * Returns the specified entry value converted to a {@link Long}. 
* * <p> * Shortcut for <code>get(Long.<jk>class</jk>, key)</code>. * * @param url The key. * @return The converted value, or <jk>null</jk> if the map contains no mapping for this key. * @throws InvalidDataConversionException If value cannot be converted. */ public Long getLong(String url) { return get(url, Long.class); } /** * Returns the specified entry value converted to a {@link Long}. * * <p> * Shortcut for <code>get(Long.<jk>class</jk>, key, defVal)</code>. * * @param url The key. * @param defVal The default value if the map doesn't contain the specified mapping. * @return The converted value, or the default value if the map contains no mapping for this key. * @throws InvalidDataConversionException If value cannot be converted. */ public Long getLong(String url, Long defVal) { return getWithDefault(url, defVal, Long.class); } /** * Returns the specified entry value converted to a {@link Boolean}. * * <p> * Shortcut for <code>get(Boolean.<jk>class</jk>, key)</code>. * * @param url The key. * @return The converted value, or <jk>null</jk> if the map contains no mapping for this key. * @throws InvalidDataConversionException If value cannot be converted. */ public Boolean getBoolean(String url) { return get(url, Boolean.class); } /** * Returns the specified entry value converted to a {@link Boolean}. * * <p> * Shortcut for <code>get(Boolean.<jk>class</jk>, key, defVal)</code>. * * @param url The key. * @param defVal The default value if the map doesn't contain the specified mapping. * @return The converted value, or the default value if the map contains no mapping for this key. * @throws InvalidDataConversionException If value cannot be converted. */ public Boolean getBoolean(String url, Boolean defVal) { return getWithDefault(url, defVal, Boolean.class); } /** * Returns the specified entry value converted to a {@link Map}. * * <p> * Shortcut for <code>get(Map.<jk>class</jk>, key)</code>. * * @param url The key. 
* @return The converted value, or <jk>null</jk> if the map contains no mapping for this key. * @throws InvalidDataConversionException If value cannot be converted. */ public Map<?,?> getMap(String url) { return get(url, Map.class); } /** * Returns the specified entry value converted to a {@link Map}. * * <p> * Shortcut for <code>get(Map.<jk>class</jk>, key, defVal)</code>. * * @param url The key. * @param defVal The default value if the map doesn't contain the specified mapping. * @return The converted value, or the default value if the map contains no mapping for this key. * @throws InvalidDataConversionException If value cannot be converted. */ public Map<?,?> getMap(String url, Map<?,?> defVal) { return getWithDefault(url, defVal, Map.class); } /** * Returns the specified entry value converted to a {@link List}. * * <p> * Shortcut for <code>get(List.<jk>class</jk>, key)</code>. * * @param url The key. * @return The converted value, or <jk>null</jk> if the map contains no mapping for this key. * @throws InvalidDataConversionException If value cannot be converted. */ public List<?> getList(String url) { return get(url, List.class); } /** * Returns the specified entry value converted to a {@link List}. * * <p> * Shortcut for <code>get(List.<jk>class</jk>, key, defVal)</code>. * * @param url The key. * @param defVal The default value if the map doesn't contain the specified mapping. * @return The converted value, or the default value if the map contains no mapping for this key. * @throws InvalidDataConversionException If value cannot be converted. */ public List<?> getList(String url, List<?> defVal) { return getWithDefault(url, defVal, List.class); } /** * Returns the specified entry value converted to a {@link Map}. * * <p> * Shortcut for <code>get(JsonMap.<jk>class</jk>, key)</code>. * * @param url The key. * @return The converted value, or <jk>null</jk> if the map contains no mapping for this key. 
* @throws InvalidDataConversionException If value cannot be converted. */ public JsonMap getJsonMap(String url) { return get(url, JsonMap.class); } /** * Returns the specified entry value converted to a {@link JsonMap}. * * <p> * Shortcut for <code>get(JsonMap.<jk>class</jk>, key, defVal)</code>. * * @param url The key. * @param defVal The default value if the map doesn't contain the specified mapping. * @return The converted value, or the default value if the map contains no mapping for this key. * @throws InvalidDataConversionException If value cannot be converted. */ public JsonMap getJsonMap(String url, JsonMap defVal) { return getWithDefault(url, defVal, JsonMap.class); } /** * Returns the specified entry value converted to a {@link JsonList}. * * <p> * Shortcut for <code>get(JsonList.<jk>class</jk>, key)</code>. * * @param url The key. * @return The converted value, or <jk>null</jk> if the map contains no mapping for this key. * @throws InvalidDataConversionException If value cannot be converted. */ public JsonList getJsonList(String url) { return get(url, JsonList.class); } /** * Returns the specified entry value converted to a {@link JsonList}. * * <p> * Shortcut for <code>get(JsonList.<jk>class</jk>, key, defVal)</code>. * * @param url The key. * @param defVal The default value if the map doesn't contain the specified mapping. * @return The converted value, or the default value if the map contains no mapping for this key. * @throws InvalidDataConversionException If value cannot be converted. */ public JsonList getJsonList(String url, JsonList defVal) { return getWithDefault(url, defVal, JsonList.class); } /** * Executes the specified method with the specified parameters on the specified object. * * @param url The URL of the element to retrieve. * @param method * The method signature. * <p> * Can be any of the following formats: * <ul class='spaced-list'> * <li> * Method name only. e.g. <js>"myMethod"</js>. * <li> * Method name with class names. e.g. 
<js>"myMethod(String,int)"</js>. * <li> * Method name with fully-qualified class names. e.g. <js>"myMethod(java.util.String,int)"</js>. * </ul> * <p> * As a rule, use the simplest format needed to uniquely resolve a method. * @param args * The arguments to pass as parameters to the method. * These will automatically be converted to the appropriate object type if possible. * This must be an array, like a JSON array. * @return The returned object from the method call. * @throws ExecutableException Exception occurred on invoked constructor/method/field. * @throws ParseException Malformed input encountered. * @throws IOException Thrown by underlying stream. */ public Object invokeMethod(String url, String method, String args) throws ExecutableException, ParseException, IOException { try { return new ObjectIntrospector(get(url), parser).invokeMethod(method, args); } catch (NoSuchMethodException | IllegalArgumentException | InvocationTargetException | IllegalAccessException e) { throw new ExecutableException(e); } } /** * Returns the list of available methods that can be passed to the {@link #invokeMethod(String, String, String)} * for the object addressed by the specified URL. * * @param url The URL. * @return The list of methods. */ public Collection<String> getPublicMethods(String url) { Object o = get(url); if (o == null) return null; return session.getClassMeta(o.getClass()).getPublicMethods().keySet(); } /** * Returns the class type of the object at the specified URL. * * @param url The URL. * @return The class type. */ public ClassMeta getClassMeta(String url) { JsonNode n = getNode(normalizeUrl(url), root); if (n == null) return null; return n.cm; } /** * Sets/replaces the element addressed by the URL. * * <p> * This method expands the POJO model as necessary to create the new element. * * @param url * The URL of the element to create. * If <jk>null</jk> or blank, the root itself is replaced with the specified value. * @param val The value being set. 
Value can be of any type. * @return The previously addressed element, or <jk>null</jk> the element did not previously exist. */ public Object put(String url, Object val) { return service(PUT, url, val); } /** * Adds a value to a list element in a POJO model. * * <p> * The URL is the address of the list being added to. * * <p> * If the list does not already exist, it will be created. * * <p> * This method expands the POJO model as necessary to create the new element. * * <h5 class='section'>Notes:</h5><ul> * <li class='note'> * You can only post to three types of nodes: * <ul> * <li>{@link List Lists} * <li>{@link Map Maps} containing integers as keys (i.e sparse arrays) * <li>arrays * </ul> * </ul> * * @param url * The URL of the element being added to. * If <jk>null</jk> or blank, the root itself (assuming it's one of the types specified above) is added to. * @param val The value being added. * @return The URL of the element that was added. */ public String post(String url, Object val) { return (String)service(POST, url, val); } /** * Remove an element from a POJO model. * * <p> * If the element does not exist, no action is taken. * * @param url * The URL of the element being deleted. * If <jk>null</jk> or blank, the root itself is deleted. * @return The removed element, or null if that element does not exist. */ public Object delete(String url) { return service(DELETE, url, null); } @Override /* Object */ public String toString() { return String.valueOf(root.o); } /** Handle nulls and strip off leading '/' char. */ private static String normalizeUrl(String url) { // Interpret nulls and blanks the same (i.e. as addressing the root itself) if (url == null) url = ""; // Strip off leading slash if present. if (isNotEmpty(url) && url.charAt(0) == '/') url = url.substring(1); return url; } /* * Workhorse method. 
*/ private Object service(int method, String url, Object val) throws ObjectRestException { url = normalizeUrl(url); if (method == GET) { JsonNode p = getNode(url, root); return p == null ? null : p.o; } // Get the url of the parent and the property name of the addressed object. int i = url.lastIndexOf('/'); String parentUrl = (i == -1 ? null : url.substring(0, i)); String childKey = (i == -1 ? url : url.substring(i + 1)); if (method == PUT) { if (url.isEmpty()) { if (rootLocked) throw new ObjectRestException(HTTP_FORBIDDEN, "Cannot overwrite root object"); Object o = root.o; root = new JsonNode(null, null, val, session.object()); return o; } JsonNode n = (parentUrl == null ? root : getNode(parentUrl, root)); if (n == null) throw new ObjectRestException(HTTP_NOT_FOUND, "Node at URL ''{0}'' not found.", parentUrl); ClassMeta cm = n.cm; Object o = n.o; if (cm.isMap()) return ((Map)o).put(childKey, convert(val, cm.getValueType())); if (cm.isCollection() && o instanceof List) return ((List)o).set(parseInt(childKey), convert(val, cm.getElementType())); if (cm.isArray()) { o = setArrayEntry(n.o, parseInt(childKey), val, cm.getElementType()); ClassMeta pct = n.parent.cm; Object po = n.parent.o; if (pct.isMap()) { ((Map)po).put(n.keyName, o); return url; } if (pct.isBean()) { BeanMap m = session.toBeanMap(po); m.put(n.keyName, o); return url; } throw new ObjectRestException(HTTP_BAD_REQUEST, "Cannot perform PUT on ''{0}'' with parent node type ''{1}''", url, pct); } if (cm.isBean()) return session.toBeanMap(o).put(childKey, val); throw new ObjectRestException(HTTP_BAD_REQUEST, "Cannot perform PUT on ''{0}'' whose parent is of type ''{1}''", url, cm); } if (method == POST) { // Handle POST to root special if (url.isEmpty()) { ClassMeta cm = root.cm; Object o = root.o; if (cm.isCollection()) { Collection c = (Collection)o; c.add(convert(val, cm.getElementType())); return (c instanceof List ? 
url + "/" + (c.size()-1) : null); } if (cm.isArray()) { Object[] o2 = addArrayEntry(o, val, cm.getElementType()); root = new JsonNode(null, null, o2, null); return url + "/" + (o2.length-1); } throw new ObjectRestException(HTTP_BAD_REQUEST, "Cannot perform POST on ''{0}'' of type ''{1}''", url, cm); } JsonNode n = getNode(url, root); if (n == null) throw new ObjectRestException(HTTP_NOT_FOUND, "Node at URL ''{0}'' not found.", url); ClassMeta cm = n.cm; Object o = n.o; if (cm.isArray()) { Object[] o2 = addArrayEntry(o, val, cm.getElementType()); ClassMeta pct = n.parent.cm; Object po = n.parent.o; if (pct.isMap()) { ((Map)po).put(childKey, o2); return url + "/" + (o2.length-1); } if (pct.isBean()) { BeanMap m = session.toBeanMap(po); m.put(childKey, o2); return url + "/" + (o2.length-1); } throw new ObjectRestException(HTTP_BAD_REQUEST, "Cannot perform POST on ''{0}'' with parent node type ''{1}''", url, pct); } if (cm.isCollection()) { Collection c = (Collection)o; c.add(convert(val, cm.getElementType())); return (c instanceof List ? url + "/" + (c.size()-1) : null); } throw new ObjectRestException(HTTP_BAD_REQUEST, "Cannot perform POST on ''{0}'' of type ''{1}''", url, cm); } if (method == DELETE) { if (url.isEmpty()) { if (rootLocked) throw new ObjectRestException(HTTP_FORBIDDEN, "Cannot overwrite root object"); Object o = root.o; root = new JsonNode(null, null, null, session.object()); return o; } JsonNode n = (parentUrl == null ? 
root : getNode(parentUrl, root)); ClassMeta cm = n.cm; Object o = n.o; if (cm.isMap()) return ((Map)o).remove(childKey); if (cm.isCollection() && o instanceof List) return ((List)o).remove(parseInt(childKey)); if (cm.isArray()) { int index = parseInt(childKey); Object old = ((Object[])o)[index]; Object[] o2 = removeArrayEntry(o, index); ClassMeta pct = n.parent.cm; Object po = n.parent.o; if (pct.isMap()) { ((Map)po).put(n.keyName, o2); return old; } if (pct.isBean()) { BeanMap m = session.toBeanMap(po); m.put(n.keyName, o2); return old; } throw new ObjectRestException(HTTP_BAD_REQUEST, "Cannot perform POST on ''{0}'' with parent node type ''{1}''", url, pct); } if (cm.isBean()) return session.toBeanMap(o).put(childKey, null); throw new ObjectRestException(HTTP_BAD_REQUEST, "Cannot perform PUT on ''{0}'' whose parent is of type ''{1}''", url, cm); } return null; // Never gets here. } private Object[] setArrayEntry(Object o, int index, Object val, ClassMeta componentType) { Object[] a = (Object[])o; if (a.length <= index) { // Expand out the array. Object[] a2 = (Object[])Array.newInstance(a.getClass().getComponentType(), index+1); System.arraycopy(a, 0, a2, 0, a.length); a = a2; } a[index] = convert(val, componentType); return a; } private Object[] addArrayEntry(Object o, Object val, ClassMeta componentType) { Object[] a = (Object[])o; // Expand out the array. Object[] a2 = (Object[])Array.newInstance(a.getClass().getComponentType(), a.length+1); System.arraycopy(a, 0, a2, 0, a.length); a2[a.length] = convert(val, componentType); return a2; } private static Object[] removeArrayEntry(Object o, int index) { Object[] a = (Object[])o; // Shrink the array. 
Object[] a2 = (Object[])Array.newInstance(a.getClass().getComponentType(), a.length-1); System.arraycopy(a, 0, a2, 0, index); System.arraycopy(a, index+1, a2, index, a.length-index-1); return a2; } class JsonNode { Object o; ClassMeta cm; JsonNode parent; String keyName; JsonNode(JsonNode parent, String keyName, Object o, ClassMeta cm) { this.o = o; this.keyName = keyName; this.parent = parent; if (cm == null || cm.isObject()) { if (o == null) cm = session.object(); else cm = session.getClassMetaForObject(o); } this.cm = cm; } } JsonNode getNode(String url, JsonNode n) { if (url == null || url.isEmpty()) return n; int i = url.indexOf('/'); String parentKey, childUrl = null; if (i == -1) { parentKey = url; } else { parentKey = url.substring(0, i); childUrl = url.substring(i + 1); } Object o = n.o; Object o2 = null; ClassMeta cm = n.cm; ClassMeta ct2 = null; if (o == null) return null; if (cm.isMap()) { o2 = ((Map)o).get(parentKey); ct2 = cm.getValueType(); } else if (cm.isCollection() && o instanceof List) { int key = parseInt(parentKey); List l = ((List)o); if (l.size() <= key) return null; o2 = l.get(key); ct2 = cm.getElementType(); } else if (cm.isArray()) { int key = parseInt(parentKey); Object[] a = ((Object[])o); if (a.length <= key) return null; o2 = a[key]; ct2 = cm.getElementType(); } else if (cm.isBean()) { BeanMap m = session.toBeanMap(o); o2 = m.get(parentKey); BeanPropertyMeta pMeta = m.getPropertyMeta(parentKey); if (pMeta == null) throw new ObjectRestException(HTTP_BAD_REQUEST, "Unknown property ''{0}'' encountered while trying to parse into class ''{1}''", parentKey, m.getClassMeta() ); ct2 = pMeta.getClassMeta(); } if (childUrl == null) return new JsonNode(n, parentKey, o2, ct2); return getNode(childUrl, new JsonNode(n, parentKey, o2, ct2)); } private Object convert(Object in, ClassMeta cm) { if (cm == null) return in; if (cm.isBean() && in instanceof Map) return session.convertToType(in, cm); return in; } private static int parseInt(String key) { 
try { return Integer.parseInt(key); } catch (NumberFormatException e) { throw new ObjectRestException(HTTP_BAD_REQUEST, "Cannot address an item in an array with a non-integer key ''{0}''", key ); } } }
apache/felix-dev
36,369
ipojo/runtime/core-it/ipojo-core-service-dependency-optional-test/src/test/java/org/apache/felix/ipojo/runtime/core/test/dependencies/optional/TestOptionalNoNullableDependencies.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.felix.ipojo.runtime.core.test.dependencies.optional; import org.apache.felix.ipojo.ComponentInstance; import org.apache.felix.ipojo.architecture.Architecture; import org.apache.felix.ipojo.architecture.InstanceDescription; import org.apache.felix.ipojo.runtime.core.test.dependencies.Common; import org.apache.felix.ipojo.runtime.core.test.services.CheckService; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.osgi.framework.ServiceReference; import java.util.Properties; import static org.junit.Assert.*; public class TestOptionalNoNullableDependencies extends Common { ComponentInstance instance1, instance2, instance3, instance4, instance5, instance6, instance7; ComponentInstance fooProvider; @Before public void setUp() { try { Properties prov = new Properties(); prov.put("instance.name", "FooProvider"); fooProvider = ipojoHelper.getFactory("FooProviderType-1").createComponentInstance(prov); fooProvider.stop(); Properties i1 = new Properties(); i1.put("instance.name", "Simple"); instance1 = ipojoHelper.getFactory("SimpleOptionalNoNullableCheckServiceProvider").createComponentInstance(i1); Properties i2 = new Properties(); i2.put("instance.name", 
"Void"); instance2 = ipojoHelper.getFactory("VoidOptionalNoNullableCheckServiceProvider").createComponentInstance(i2); Properties i3 = new Properties(); i3.put("instance.name", "Object"); instance3 = ipojoHelper.getFactory("ObjectOptionalNoNullableCheckServiceProvider").createComponentInstance(i3); Properties i4 = new Properties(); i4.put("instance.name", "Ref"); instance4 = ipojoHelper.getFactory("RefOptionalNoNullableCheckServiceProvider").createComponentInstance(i4); Properties i5 = new Properties(); i5.put("instance.name", "Both"); instance5 = ipojoHelper.getFactory("BothOptionalNoNullableCheckServiceProvider").createComponentInstance(i5); Properties i6 = new Properties(); i6.put("instance.name", "Map"); instance6 = ipojoHelper.getFactory("MapOptionalNoNullableCheckServiceProvider").createComponentInstance(i6); Properties i7 = new Properties(); i7.put("instance.name", "Dictionary"); instance7 = ipojoHelper.getFactory("DictOptionalNoNullableCheckServiceProvider").createComponentInstance(i7); } catch (Exception e) { e.getMessage(); fail(e.getMessage()); } } @After public void tearDown() { instance1.dispose(); instance2.dispose(); instance3.dispose(); instance4.dispose(); instance5.dispose(); instance6.dispose(); instance7.dispose(); fooProvider.dispose(); instance1 = null; instance2 = null; instance3 = null; instance4 = null; instance5 = null; instance6 = null; instance7 = null; fooProvider = null; } @Test public void testSimple() { ServiceReference arch_ref = ipojoHelper.getServiceReferenceByName(Architecture.class.getName(), instance1.getInstanceName()); assertNotNull("Check architecture availability", arch_ref); InstanceDescription id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription(); assertTrue("Check instance validity - 1", id.getState() == ComponentInstance.VALID); ServiceReference cs_ref = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), instance1.getInstanceName()); assertNotNull("Check CheckService 
availability", cs_ref); CheckService cs = (CheckService) osgiHelper.getRawServiceObject(cs_ref); Properties props = cs.getProps(); //Check properties // no service, no nullable => runtime exception expected assertTrue("check excepted exception", ((Boolean) props.get("exception")).booleanValue()); assertNull("check CheckService invocation - 1 (" + props.get("result") + ")", props.get("result")); // Null assertEquals("check void bind invocation - 1", ((Integer) props.get("voidB")).intValue(), 0); assertEquals("check void unbind callback invocation - 1", ((Integer) props.get("voidU")).intValue(), 0); assertEquals("check object bind callback invocation - 1", ((Integer) props.get("objectB")).intValue(), 0); assertEquals("check object unbind callback invocation - 1", ((Integer) props.get("objectU")).intValue(), 0); assertEquals("check ref bind callback invocation - 1", ((Integer) props.get("refB")).intValue(), 0); assertEquals("check ref unbind callback invocation - 1", ((Integer) props.get("refU")).intValue(), 0); assertNull("Check FS invocation (object) - 1 (" + props.get("object") + ")", props.get("object")); fooProvider.start(); id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription(); assertTrue("Check instance validity - 2", id.getState() == ComponentInstance.VALID); assertNotNull("Check CheckService availability", cs_ref); cs = (CheckService) osgiHelper.getRawServiceObject(cs_ref); props = cs.getProps(); //Check properties assertTrue("check CheckService invocation - 2", ((Boolean) props.get("result")).booleanValue()); // True, a provider is there // No exception expected assertFalse("check unexcepted exception", ((Boolean) props.get("exception")).booleanValue()); assertEquals("check void bind invocation - 2", ((Integer) props.get("voidB")).intValue(), 0); assertEquals("check void unbind callback invocation - 2", ((Integer) props.get("voidU")).intValue(), 0); assertEquals("check object bind callback invocation - 2", ((Integer) 
props.get("objectB")).intValue(), 0); assertEquals("check object unbind callback invocation - 2", ((Integer) props.get("objectU")).intValue(), 0); assertEquals("check ref bind callback invocation - 2", ((Integer) props.get("refB")).intValue(), 0); assertEquals("check ref unbind callback invocation - 2", ((Integer) props.get("refU")).intValue(), 0); assertNotNull("Check FS invocation (object) - 2", props.get("object")); assertEquals("Check FS invocation (int) - 2", ((Integer) props.get("int")).intValue(), 1); assertEquals("Check FS invocation (long) - 2", ((Long) props.get("long")).longValue(), 1); assertEquals("Check FS invocation (double) - 2", ((Double) props.get("double")).doubleValue(), 1.0, 0); fooProvider.stop(); id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription(); assertTrue("Check instance validity - 3", id.getState() == ComponentInstance.VALID); id = null; cs = null; getContext().ungetService(arch_ref); getContext().ungetService(cs_ref); } @Test public void testVoid() { ServiceReference arch_ref = ipojoHelper.getServiceReferenceByName(Architecture.class.getName(), instance2.getInstanceName()); assertNotNull("Check architecture availability", arch_ref); InstanceDescription id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription(); assertTrue("Check instance validity - 1", id.getState() == ComponentInstance.VALID); ServiceReference cs_ref = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), instance2.getInstanceName()); assertNotNull("Check CheckService availability", cs_ref); CheckService cs = (CheckService) osgiHelper.getRawServiceObject(cs_ref); Properties props = cs.getProps(); //Check properties assertNull("check CheckService invocation - 1", props.get("result")); // Null, no provider assertEquals("check void bind invocation - 1", ((Integer) props.get("voidB")).intValue(), 0); assertEquals("check void unbind callback invocation - 1", ((Integer) 
props.get("voidU")).intValue(), 0); assertEquals("check object bind callback invocation - 1", ((Integer) props.get("objectB")).intValue(), 0); assertEquals("check object unbind callback invocation - 1", ((Integer) props.get("objectU")).intValue(), 0); assertEquals("check ref bind callback invocation - 1", ((Integer) props.get("refB")).intValue(), 0); assertEquals("check ref unbind callback invocation - 1", ((Integer) props.get("refU")).intValue(), 0); assertNull("Check FS invocation (object) - 1", props.get("object")); fooProvider.start(); id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription(); assertTrue("Check instance validity - 2", id.getState() == ComponentInstance.VALID); assertNotNull("Check CheckService availability", cs_ref); cs = (CheckService) osgiHelper.getRawServiceObject(cs_ref); props = cs.getProps(); //Check properties assertTrue("check CheckService invocation -2", ((Boolean) props.get("result")).booleanValue()); assertEquals("check void bind invocation -2", ((Integer) props.get("voidB")).intValue(), 1); assertEquals("check void unbind callback invocation -2", ((Integer) props.get("voidU")).intValue(), 0); assertEquals("check object bind callback invocation -2", ((Integer) props.get("objectB")).intValue(), 0); assertEquals("check object unbind callback invocation -2", ((Integer) props.get("objectU")).intValue(), 0); assertEquals("check ref bind callback invocation -2", ((Integer) props.get("refB")).intValue(), 0); assertEquals("check ref unbind callback invocation -2", ((Integer) props.get("refU")).intValue(), 0); assertNotNull("Check FS invocation (object) - 2", props.get("object")); assertEquals("Check FS invocation (int) - 2", ((Integer) props.get("int")).intValue(), 1); assertEquals("Check FS invocation (long) - 2", ((Long) props.get("long")).longValue(), 1); assertEquals("Check FS invocation (double) - 2", ((Double) props.get("double")).doubleValue(), 1.0, 0); fooProvider.stop(); id = ((Architecture) 
osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription(); assertTrue("Check instance validity - 3", id.getState() == ComponentInstance.VALID); cs = (CheckService) osgiHelper.getRawServiceObject(cs_ref); props = cs.getProps(); //Check properties assertNull("check CheckService invocation -3", props.get("result")); assertEquals("check void bind invocation -3", ((Integer) props.get("voidB")).intValue(), 1); assertEquals("check void unbind callback invocation -3 (" + ((Integer) props.get("voidU")) + ")", ((Integer) props.get("voidU")).intValue(), 1); assertEquals("check object bind callback invocation -3", ((Integer) props.get("objectB")).intValue(), 0); assertEquals("check object unbind callback invocation -3", ((Integer) props.get("objectU")).intValue(), 0); assertEquals("check ref bind callback invocation -3", ((Integer) props.get("refB")).intValue(), 0); assertEquals("check ref unbind callback invocation -3", ((Integer) props.get("refU")).intValue(), 0); assertNull("Check FS invocation (object) - 3", props.get("object")); id = null; cs = null; getContext().ungetService(arch_ref); getContext().ungetService(cs_ref); } @Test public void testObject() { ServiceReference arch_ref = ipojoHelper.getServiceReferenceByName(Architecture.class.getName(), instance3.getInstanceName()); assertNotNull("Check architecture availability", arch_ref); InstanceDescription id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription(); assertTrue("Check instance validity - 1", id.getState() == ComponentInstance.VALID); ServiceReference cs_ref = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), instance3.getInstanceName()); assertNotNull("Check CheckService availability", cs_ref); CheckService cs = (CheckService) osgiHelper.getRawServiceObject(cs_ref); Properties props = cs.getProps(); //Check properties assertNull("check CheckService invocation -1", props.get("result")); // Null, no provider assertEquals("check void bind invocation -1", 
((Integer) props.get("voidB")).intValue(), 0); assertEquals("check void unbind callback invocation -1", ((Integer) props.get("voidU")).intValue(), 0); assertEquals("check object bind callback invocation -1", ((Integer) props.get("objectB")).intValue(), 0); assertEquals("check object unbind callback invocation -1", ((Integer) props.get("objectU")).intValue(), 0); assertEquals("check ref bind callback invocation -1", ((Integer) props.get("refB")).intValue(), 0); assertEquals("check ref unbind callback invocation -1", ((Integer) props.get("refU")).intValue(), 0); fooProvider.start(); id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription(); assertTrue("Check instance validity - 2", id.getState() == ComponentInstance.VALID); assertNotNull("Check CheckService availability", cs_ref); cs = (CheckService) osgiHelper.getRawServiceObject(cs_ref); props = cs.getProps(); //Check properties assertTrue("check CheckService invocation -2", ((Boolean) props.get("result")).booleanValue()); assertEquals("check void bind invocation -2", ((Integer) props.get("voidB")).intValue(), 0); assertEquals("check void unbind callback invocation -2", ((Integer) props.get("voidU")).intValue(), 0); assertEquals("check object bind callback invocation -2 (" + ((Integer) props.get("objectB")).intValue() + ")", ((Integer) props.get("objectB")).intValue(), 1); assertEquals("check object unbind callback invocation -2", ((Integer) props.get("objectU")).intValue(), 0); assertEquals("check ref bind callback invocation -2", ((Integer) props.get("refB")).intValue(), 0); assertEquals("check ref unbind callback invocation -2", ((Integer) props.get("refU")).intValue(), 0); fooProvider.stop(); id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription(); assertTrue("Check instance validity - 3", id.getState() == ComponentInstance.VALID); cs = (CheckService) osgiHelper.getRawServiceObject(cs_ref); props = cs.getProps(); //Check properties assertNull("check 
CheckService invocation -3", props.get("result")); // No provider. assertEquals("check void bind invocation -3", ((Integer) props.get("voidB")).intValue(), 0); assertEquals("check void unbind callback invocation -3", ((Integer) props.get("voidU")).intValue(), 0); assertEquals("check object bind callback invocation -3", ((Integer) props.get("objectB")).intValue(), 1); assertEquals("check object unbind callback invocation -3", ((Integer) props.get("objectU")).intValue(), 1); assertEquals("check ref bind callback invocation -3", ((Integer) props.get("refB")).intValue(), 0); assertEquals("check ref unbind callback invocation -3", ((Integer) props.get("refU")).intValue(), 0); id = null; cs = null; getContext().ungetService(arch_ref); getContext().ungetService(cs_ref); } @Test public void testRef() { ServiceReference arch_ref = ipojoHelper.getServiceReferenceByName(Architecture.class.getName(), instance4.getInstanceName()); assertNotNull("Check architecture availability", arch_ref); InstanceDescription id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription(); assertTrue("Check instance validity - 1", id.getState() == ComponentInstance.VALID); ServiceReference cs_ref = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), instance4.getInstanceName()); assertNotNull("Check CheckService availability", cs_ref); CheckService cs = (CheckService) osgiHelper.getRawServiceObject(cs_ref); Properties props = cs.getProps(); //Check properties assertNull("check CheckService invocation -1", props.get("result")); //Null, no provider assertEquals("check void bind invocation -1", ((Integer) props.get("voidB")).intValue(), 0); assertEquals("check void unbind callback invocation -1", ((Integer) props.get("voidU")).intValue(), 0); assertEquals("check object bind callback invocation -1", ((Integer) props.get("objectB")).intValue(), 0); assertEquals("check object unbind callback invocation -1", ((Integer) props.get("objectU")).intValue(), 0); 
assertEquals("check ref bind callback invocation -1", ((Integer) props.get("refB")).intValue(), 0); assertEquals("check ref unbind callback invocation -1", ((Integer) props.get("refU")).intValue(), 0); fooProvider.start(); id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription(); assertTrue("Check instance validity - 2", id.getState() == ComponentInstance.VALID); assertNotNull("Check CheckService availability", cs_ref); cs = (CheckService) osgiHelper.getRawServiceObject(cs_ref); props = cs.getProps(); //Check properties assertTrue("check CheckService invocation -2", ((Boolean) props.get("result")).booleanValue()); assertEquals("check void bind invocation -2", ((Integer) props.get("voidB")).intValue(), 0); assertEquals("check void unbind callback invocation -2", ((Integer) props.get("voidU")).intValue(), 0); assertEquals("check object bind callback invocation -2", ((Integer) props.get("objectB")).intValue(), 0); assertEquals("check object unbind callback invocation -2", ((Integer) props.get("objectU")).intValue(), 0); assertEquals("check ref bind callback invocation -2", ((Integer) props.get("refB")).intValue(), 1); assertEquals("check ref unbind callback invocation -2", ((Integer) props.get("refU")).intValue(), 0); fooProvider.stop(); id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription(); assertTrue("Check instance validity - 3", id.getState() == ComponentInstance.VALID); cs = (CheckService) osgiHelper.getRawServiceObject(cs_ref); props = cs.getProps(); //Check properties assertNull("check CheckService invocation -3", props.get("result")); // Null, no provider assertEquals("check void bind invocation -3", ((Integer) props.get("voidB")).intValue(), 0); assertEquals("check void unbind callback invocation -3", ((Integer) props.get("voidU")).intValue(), 0); assertEquals("check object bind callback invocation -3", ((Integer) props.get("objectB")).intValue(), 0); assertEquals("check object unbind callback 
invocation -3", ((Integer) props.get("objectU")).intValue(), 0); assertEquals("check ref bind callback invocation -3", ((Integer) props.get("refB")).intValue(), 1); assertEquals("check ref unbind callback invocation -3", ((Integer) props.get("refU")).intValue(), 1); id = null; cs = null; getContext().ungetService(arch_ref); getContext().ungetService(cs_ref); } @Test public void testBoth() { ServiceReference arch_ref = ipojoHelper.getServiceReferenceByName(Architecture.class.getName(), instance5.getInstanceName()); assertNotNull("Check architecture availability", arch_ref); InstanceDescription id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription(); assertTrue("Check instance validity - 1", id.getState() == ComponentInstance.VALID); ServiceReference cs_ref = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), instance5.getInstanceName()); assertNotNull("Check CheckService availability", cs_ref); CheckService cs = (CheckService) osgiHelper.getRawServiceObject(cs_ref); Properties props = cs.getProps(); //Check properties assertNull("check CheckService invocation -1", props.get("result")); // Null, no provider assertEquals("check void bind invocation -1", ((Integer) props.get("voidB")).intValue(), 0); assertEquals("check void unbind callback invocation -1", ((Integer) props.get("voidU")).intValue(), 0); assertEquals("check object bind callback invocation -1", ((Integer) props.get("objectB")).intValue(), 0); assertEquals("check object unbind callback invocation -1", ((Integer) props.get("objectU")).intValue(), 0); assertEquals("check both bind callback invocation -1", ((Integer) props.get("bothB")).intValue(), 0); assertEquals("check both unbind callback invocation -1", ((Integer) props.get("bothU")).intValue(), 0); fooProvider.start(); id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription(); assertTrue("Check instance validity - 2", id.getState() == ComponentInstance.VALID); 
assertNotNull("Check CheckService availability", cs_ref); cs = (CheckService) osgiHelper.getRawServiceObject(cs_ref); props = cs.getProps(); //Check properties assertTrue("check CheckService invocation -2", ((Boolean) props.get("result")).booleanValue()); assertEquals("check void bind invocation -2", ((Integer) props.get("voidB")).intValue(), 0); assertEquals("check void unbind callback invocation -2", ((Integer) props.get("voidU")).intValue(), 0); assertEquals("check object bind callback invocation -2", ((Integer) props.get("objectB")).intValue(), 0); assertEquals("check object unbind callback invocation -2", ((Integer) props.get("objectU")).intValue(), 0); assertEquals("check ref bind callback invocation -2", ((Integer) props.get("refB")).intValue(), 0); assertEquals("check ref unbind callback invocation -2", ((Integer) props.get("refU")).intValue(), 0); assertEquals("check both bind callback invocation -2", ((Integer) props.get("bothB")).intValue(), 1); assertEquals("check both unbind callback invocation -2", ((Integer) props.get("bothU")).intValue(), 0); fooProvider.stop(); id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription(); assertTrue("Check instance validity - 3", id.getState() == ComponentInstance.VALID); cs = (CheckService) osgiHelper.getRawServiceObject(cs_ref); props = cs.getProps(); //Check properties assertNull("check CheckService invocation -3", props.get("result")); // Null, no provider assertEquals("check void bind invocation -3", ((Integer) props.get("voidB")).intValue(), 0); assertEquals("check void unbind callback invocation -3", ((Integer) props.get("voidU")).intValue(), 0); assertEquals("check object bind callback invocation -3", ((Integer) props.get("objectB")).intValue(), 0); assertEquals("check object unbind callback invocation -3", ((Integer) props.get("objectU")).intValue(), 0); assertEquals("check ref bind callback invocation -3", ((Integer) props.get("refB")).intValue(), 0); assertEquals("check ref 
unbind callback invocation -3", ((Integer) props.get("refU")).intValue(), 0); assertEquals("check both bind callback invocation -3", ((Integer) props.get("bothB")).intValue(), 1); assertEquals("check both unbind callback invocation -3", ((Integer) props.get("bothU")).intValue(), 1); id = null; cs = null; getContext().ungetService(arch_ref); getContext().ungetService(cs_ref); } @Test public void testDict() { ServiceReference arch_ref = ipojoHelper.getServiceReferenceByName(Architecture.class.getName(), instance7.getInstanceName()); assertNotNull("Check architecture availability", arch_ref); InstanceDescription id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription(); assertTrue("Check instance validity - 1", id.getState() == ComponentInstance.VALID); ServiceReference cs_ref = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), instance7.getInstanceName()); assertNotNull("Check CheckService availability", cs_ref); CheckService cs = (CheckService) osgiHelper.getRawServiceObject(cs_ref); Properties props = cs.getProps(); //Check properties assertNull("check CheckService invocation -1", props.get("result")); // Null, no provider assertEquals("check void bind invocation -1", ((Integer) props.get("voidB")).intValue(), 0); assertEquals("check void unbind callback invocation -1", ((Integer) props.get("voidU")).intValue(), 0); assertEquals("check object bind callback invocation -1", ((Integer) props.get("objectB")).intValue(), 0); assertEquals("check object unbind callback invocation -1", ((Integer) props.get("objectU")).intValue(), 0); assertEquals("check both bind callback invocation -1", ((Integer) props.get("bothB")).intValue(), 0); assertEquals("check both unbind callback invocation -1", ((Integer) props.get("bothU")).intValue(), 0); assertEquals("check map bind callback invocation -1", ((Integer) props.get("mapB")).intValue(), 0); assertEquals("check map unbind callback invocation -1", ((Integer) 
props.get("mapU")).intValue(), 0); assertEquals("check dict bind callback invocation -1", ((Integer) props.get("dictB")).intValue(), 0); assertEquals("check dict unbind callback invocation -1", ((Integer) props.get("dictU")).intValue(), 0); fooProvider.start(); id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription(); assertTrue("Check instance validity - 2", id.getState() == ComponentInstance.VALID); assertNotNull("Check CheckService availability", cs_ref); cs = (CheckService) osgiHelper.getRawServiceObject(cs_ref); props = cs.getProps(); //Check properties assertTrue("check CheckService invocation -2", ((Boolean) props.get("result")).booleanValue()); assertEquals("check void bind invocation -2", ((Integer) props.get("voidB")).intValue(), 0); assertEquals("check void unbind callback invocation -2", ((Integer) props.get("voidU")).intValue(), 0); assertEquals("check object bind callback invocation -2", ((Integer) props.get("objectB")).intValue(), 0); assertEquals("check object unbind callback invocation -2", ((Integer) props.get("objectU")).intValue(), 0); assertEquals("check ref bind callback invocation -2", ((Integer) props.get("refB")).intValue(), 0); assertEquals("check ref unbind callback invocation -2", ((Integer) props.get("refU")).intValue(), 0); assertEquals("check both bind callback invocation -2", ((Integer) props.get("bothB")).intValue(), 0); assertEquals("check both unbind callback invocation -2", ((Integer) props.get("bothU")).intValue(), 0); assertEquals("check map bind callback invocation -2", ((Integer) props.get("mapB")).intValue(), 0); assertEquals("check map unbind callback invocation -2", ((Integer) props.get("mapU")).intValue(), 0); assertEquals("check dict bind callback invocation -2", ((Integer) props.get("dictB")).intValue(), 1); assertEquals("check dict unbind callback invocation -2", ((Integer) props.get("dictU")).intValue(), 0); fooProvider.stop(); id = ((Architecture) 
osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription(); assertTrue("Check instance validity - 3", id.getState() == ComponentInstance.VALID); cs = (CheckService) osgiHelper.getRawServiceObject(cs_ref); props = cs.getProps(); //Check properties assertNull("check CheckService invocation -3", props.get("result")); // Null, no provider assertEquals("check void bind invocation -3", ((Integer) props.get("voidB")).intValue(), 0); assertEquals("check void unbind callback invocation -3", ((Integer) props.get("voidU")).intValue(), 0); assertEquals("check object bind callback invocation -3", ((Integer) props.get("objectB")).intValue(), 0); assertEquals("check object unbind callback invocation -3", ((Integer) props.get("objectU")).intValue(), 0); assertEquals("check ref bind callback invocation -3", ((Integer) props.get("refB")).intValue(), 0); assertEquals("check ref unbind callback invocation -3", ((Integer) props.get("refU")).intValue(), 0); assertEquals("check both bind callback invocation -3", ((Integer) props.get("bothB")).intValue(), 0); assertEquals("check both unbind callback invocation -3", ((Integer) props.get("bothU")).intValue(), 0); assertEquals("check map bind callback invocation -3", ((Integer) props.get("mapB")).intValue(), 0); assertEquals("check map unbind callback invocation -3", ((Integer) props.get("mapU")).intValue(), 0); assertEquals("check dict bind callback invocation -3", ((Integer) props.get("dictB")).intValue(), 1); assertEquals("check dict unbind callback invocation -3", ((Integer) props.get("dictU")).intValue(), 1); id = null; cs = null; getContext().ungetService(arch_ref); getContext().ungetService(cs_ref); } @Test public void testMap() { ServiceReference arch_ref = ipojoHelper.getServiceReferenceByName(Architecture.class.getName(), instance6.getInstanceName()); assertNotNull("Check architecture availability", arch_ref); InstanceDescription id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription(); 
assertTrue("Check instance validity - 1", id.getState() == ComponentInstance.VALID); ServiceReference cs_ref = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), instance6.getInstanceName()); assertNotNull("Check CheckService availability", cs_ref); CheckService cs = (CheckService) osgiHelper.getRawServiceObject(cs_ref); Properties props = cs.getProps(); //Check properties assertNull("check CheckService invocation -1", props.get("result")); // Null, no provider assertEquals("check void bind invocation -1", ((Integer) props.get("voidB")).intValue(), 0); assertEquals("check void unbind callback invocation -1", ((Integer) props.get("voidU")).intValue(), 0); assertEquals("check object bind callback invocation -1", ((Integer) props.get("objectB")).intValue(), 0); assertEquals("check object unbind callback invocation -1", ((Integer) props.get("objectU")).intValue(), 0); assertEquals("check both bind callback invocation -1", ((Integer) props.get("bothB")).intValue(), 0); assertEquals("check both unbind callback invocation -1", ((Integer) props.get("bothU")).intValue(), 0); assertEquals("check map bind callback invocation -1", ((Integer) props.get("mapB")).intValue(), 0); assertEquals("check map unbind callback invocation -1", ((Integer) props.get("mapU")).intValue(), 0); assertEquals("check dict bind callback invocation -1", ((Integer) props.get("dictB")).intValue(), 0); assertEquals("check dict unbind callback invocation -1", ((Integer) props.get("dictU")).intValue(), 0); fooProvider.start(); id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription(); assertTrue("Check instance validity - 2", id.getState() == ComponentInstance.VALID); assertNotNull("Check CheckService availability", cs_ref); cs = (CheckService) osgiHelper.getRawServiceObject(cs_ref); props = cs.getProps(); //Check properties assertTrue("check CheckService invocation -2", ((Boolean) props.get("result")).booleanValue()); assertEquals("check void bind invocation 
-2", ((Integer) props.get("voidB")).intValue(), 0); assertEquals("check void unbind callback invocation -2", ((Integer) props.get("voidU")).intValue(), 0); assertEquals("check object bind callback invocation -2", ((Integer) props.get("objectB")).intValue(), 0); assertEquals("check object unbind callback invocation -2", ((Integer) props.get("objectU")).intValue(), 0); assertEquals("check ref bind callback invocation -2", ((Integer) props.get("refB")).intValue(), 0); assertEquals("check ref unbind callback invocation -2", ((Integer) props.get("refU")).intValue(), 0); assertEquals("check both bind callback invocation -2", ((Integer) props.get("bothB")).intValue(), 0); assertEquals("check both unbind callback invocation -2", ((Integer) props.get("bothU")).intValue(), 0); assertEquals("check map bind callback invocation -2", ((Integer) props.get("mapB")).intValue(), 1); assertEquals("check map unbind callback invocation -2", ((Integer) props.get("mapU")).intValue(), 0); assertEquals("check dict bind callback invocation -2", ((Integer) props.get("dictB")).intValue(), 0); assertEquals("check dict unbind callback invocation -2", ((Integer) props.get("dictU")).intValue(), 0); fooProvider.stop(); id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription(); assertTrue("Check instance validity - 3", id.getState() == ComponentInstance.VALID); cs = (CheckService) osgiHelper.getRawServiceObject(cs_ref); props = cs.getProps(); //Check properties assertNull("check CheckService invocation -3", props.get("result")); // Null, no provider assertEquals("check void bind invocation -3", ((Integer) props.get("voidB")).intValue(), 0); assertEquals("check void unbind callback invocation -3", ((Integer) props.get("voidU")).intValue(), 0); assertEquals("check object bind callback invocation -3", ((Integer) props.get("objectB")).intValue(), 0); assertEquals("check object unbind callback invocation -3", ((Integer) props.get("objectU")).intValue(), 0); 
assertEquals("check ref bind callback invocation -3", ((Integer) props.get("refB")).intValue(), 0); assertEquals("check ref unbind callback invocation -3", ((Integer) props.get("refU")).intValue(), 0); assertEquals("check both bind callback invocation -3", ((Integer) props.get("bothB")).intValue(), 0); assertEquals("check both unbind callback invocation -3", ((Integer) props.get("bothU")).intValue(), 0); assertEquals("check map bind callback invocation -3", ((Integer) props.get("mapB")).intValue(), 1); assertEquals("check map unbind callback invocation -3", ((Integer) props.get("mapU")).intValue(), 1); assertEquals("check dict bind callback invocation -3", ((Integer) props.get("dictB")).intValue(), 0); assertEquals("check dict unbind callback invocation -3", ((Integer) props.get("dictU")).intValue(), 0); id = null; cs = null; getContext().ungetService(arch_ref); getContext().ungetService(cs_ref); } }
googleapis/google-auth-library-java
36,853
oauth2_http/javatests/com/google/auth/oauth2/UserAuthorizerTest.java
/* * Copyright 2015, Google Inc. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following disclaimer * in the documentation and/or other materials provided with the * distribution. * * * Neither the name of Google Inc. nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package com.google.auth.oauth2; import static com.google.auth.TestUtils.WORKFORCE_IDENTITY_FEDERATION_AUTH_URI; import static com.google.auth.TestUtils.WORKFORCE_IDENTITY_FEDERATION_TOKEN_SERVER_URI; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import com.google.auth.TestUtils; import com.google.auth.http.HttpTransportFactory; import com.google.auth.oauth2.UserAuthorizer.ClientAuthenticationType; import com.google.auth.oauth2.UserAuthorizer.TokenResponseWithConfig; import java.io.IOException; import java.net.URI; import java.net.URL; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Unit Tests for UserAuthorizer */ @RunWith(JUnit4.class) public class UserAuthorizerTest { private static final String CLIENT_ID_VALUE = "ya29.1.AADtN_UtlxN3PuGAxrN2XQnZTVRvDyVWnYq4I6dws"; private static final String CLIENT_SECRET = "jakuaL9YyieakhECKL2SwZcu"; private static final String REFRESH_TOKEN = "1/Tl6awhpFjkMkSJoj1xsli0H2eL5YsMgU_NKPY2TyGWY"; private static final String ACCESS_TOKEN_VALUE = "1/MkSJoj1xsli0AccessToken_NKPY2"; private static final List<String> GRANTED_SCOPES = Arrays.asList("scope1", "scope2"); private static final String GRANTED_SCOPES_STRING = String.join(" ", GRANTED_SCOPES); private static final String DUMMY_SCOPE = "dummy_scope"; private static final List<String> DUMMY_SCOPES = Arrays.asList(DUMMY_SCOPE); private static final Long EXPIRATION_TIME = 504000300L; private static final AccessToken ACCESS_TOKEN = AccessToken.newBuilder() .setTokenValue(ACCESS_TOKEN_VALUE) 
.setExpirationTime(new Date(EXPIRATION_TIME)) .setScopes(GRANTED_SCOPES) .build(); private static final ClientId CLIENT_ID = ClientId.of(CLIENT_ID_VALUE, CLIENT_SECRET); private static final String USER_ID = "foo@bar.com"; private static final URI CALLBACK_URI = URI.create("/testcallback"); private static final String CODE = "thisistheend"; private static final URI BASE_URI = URI.create("http://example.com/foo"); private static final PKCEProvider pkce = new DefaultPKCEProvider(); @Test public void constructorMinimum() { TokenStore store = new MemoryTokensStorage(); UserAuthorizer authorizer = UserAuthorizer.newBuilder() .setClientId(CLIENT_ID) .setScopes(DUMMY_SCOPES) .setTokenStore(store) .build(); assertSame(CLIENT_ID, authorizer.getClientId()); assertSame(store, authorizer.getTokenStore()); assertEquals(DUMMY_SCOPES, authorizer.getScopes()); assertEquals(UserAuthorizer.DEFAULT_CALLBACK_URI, authorizer.getCallbackUri()); assertEquals( UserAuthorizer.ClientAuthenticationType.CLIENT_SECRET_POST, authorizer.getClientAuthenticationType()); } @Test public void constructorCommon() { TokenStore store = new MemoryTokensStorage(); UserAuthorizer authorizer = UserAuthorizer.newBuilder() .setClientId(CLIENT_ID) .setScopes(DUMMY_SCOPES) .setTokenStore(store) .setCallbackUri(CALLBACK_URI) .setClientAuthenticationType( UserAuthorizer.ClientAuthenticationType.CLIENT_SECRET_BASIC) .build(); assertSame(CLIENT_ID, authorizer.getClientId()); assertSame(store, authorizer.getTokenStore()); assertEquals(DUMMY_SCOPES, authorizer.getScopes()); assertEquals(CALLBACK_URI, authorizer.getCallbackUri()); assertEquals( UserAuthorizer.ClientAuthenticationType.CLIENT_SECRET_BASIC, authorizer.getClientAuthenticationType()); } @Test public void constructorWithClientAuthenticationTypeNone() { TokenStore store = new MemoryTokensStorage(); UserAuthorizer authorizer = UserAuthorizer.newBuilder() .setClientId(CLIENT_ID) .setScopes(DUMMY_SCOPES) .setTokenStore(store) .setCallbackUri(CALLBACK_URI) 
.setClientAuthenticationType(UserAuthorizer.ClientAuthenticationType.NONE) .build(); assertSame(CLIENT_ID, authorizer.getClientId()); assertSame(store, authorizer.getTokenStore()); assertEquals(DUMMY_SCOPES, authorizer.getScopes()); assertEquals(CALLBACK_URI, authorizer.getCallbackUri()); assertEquals( UserAuthorizer.ClientAuthenticationType.NONE, authorizer.getClientAuthenticationType()); } @Test(expected = NullPointerException.class) public void constructorCommon_nullClientId_throws() { UserAuthorizer.newBuilder().setScopes(DUMMY_SCOPES).setCallbackUri(CALLBACK_URI).build(); } @Test(expected = NullPointerException.class) public void constructorCommon_nullScopes_throws() { UserAuthorizer.newBuilder().setClientId(CLIENT_ID).build(); } @Test public void getCallbackUri_relativeToBase() { final URI callbackURI = URI.create("/bar"); final URI expectedCallbackURI = URI.create("http://example.com/bar"); UserAuthorizer authorizer = UserAuthorizer.newBuilder() .setClientId(CLIENT_ID) .setScopes(DUMMY_SCOPES) .setCallbackUri(callbackURI) .build(); URI absoluteCallbackURI = authorizer.getCallbackUri(BASE_URI); assertEquals(expectedCallbackURI, absoluteCallbackURI); } @Test public void getAuthorizationUrl() throws IOException { final String CUSTOM_STATE = "custom_state"; final String PROTOCOL = "https"; final String HOST = "accounts.test.com"; final String PATH = "/o/o/oauth2/auth"; final URI AUTH_URI = URI.create(PROTOCOL + "://" + HOST + PATH); final String EXPECTED_CALLBACK = "http://example.com" + CALLBACK_URI.toString(); UserAuthorizer authorizer = UserAuthorizer.newBuilder() .setClientId(CLIENT_ID) .setScopes(DUMMY_SCOPES) .setCallbackUri(CALLBACK_URI) .setUserAuthUri(AUTH_URI) .setPKCEProvider(pkce) .build(); URL authorizationUrl = authorizer.getAuthorizationUrl(USER_ID, CUSTOM_STATE, BASE_URI); assertEquals(PROTOCOL, authorizationUrl.getProtocol()); assertEquals(-1, authorizationUrl.getPort()); assertEquals(PATH, authorizationUrl.getPath()); assertEquals(HOST, 
authorizationUrl.getHost()); String query = authorizationUrl.getQuery(); Map<String, String> parameters = TestUtils.parseQuery(query); assertEquals(CUSTOM_STATE, parameters.get("state")); assertEquals(USER_ID, parameters.get("login_hint")); assertEquals(EXPECTED_CALLBACK, parameters.get("redirect_uri")); assertEquals(CLIENT_ID_VALUE, parameters.get("client_id")); assertEquals(DUMMY_SCOPE, parameters.get("scope")); assertEquals("code", parameters.get("response_type")); assertEquals(pkce.getCodeChallenge(), parameters.get("code_challenge")); assertEquals(pkce.getCodeChallengeMethod(), parameters.get("code_challenge_method")); assertEquals("consent", parameters.get("prompt")); } @Test public void getAuthorizationUrl_additionalParameters() throws IOException { final String CUSTOM_STATE = "custom_state"; final String PROTOCOL = "https"; final String HOST = "accounts.test.com"; final String PATH = "/o/o/oauth2/auth"; final URI AUTH_URI = URI.create(PROTOCOL + "://" + HOST + PATH); final String EXPECTED_CALLBACK = "http://example.com" + CALLBACK_URI.toString(); UserAuthorizer authorizer = UserAuthorizer.newBuilder() .setClientId(CLIENT_ID) .setScopes(DUMMY_SCOPES) .setCallbackUri(CALLBACK_URI) .setUserAuthUri(AUTH_URI) .build(); Map<String, String> additionalParameters = new HashMap<String, String>(); additionalParameters.put("param1", "value1"); additionalParameters.put("param2", "value2"); // Verify that the authorization URL doesn't include the additional parameters if they are not // passed in. URL authorizationUrl = authorizer.getAuthorizationUrl(USER_ID, CUSTOM_STATE, BASE_URI); String query = authorizationUrl.getQuery(); Map<String, String> parameters = TestUtils.parseQuery(query); assertFalse(parameters.containsKey("param1")); assertFalse(parameters.containsKey("param2")); // Verify that the authorization URL includes the additional parameters if they are passed in. 
authorizationUrl = authorizer.getAuthorizationUrl(USER_ID, CUSTOM_STATE, BASE_URI, additionalParameters); query = authorizationUrl.getQuery(); parameters = TestUtils.parseQuery(query); assertEquals("value1", parameters.get("param1")); assertEquals("value2", parameters.get("param2")); // Verify that the authorization URL doesn't include the additional parameters passed in the // previous call to the authorizer authorizationUrl = authorizer.getAuthorizationUrl(USER_ID, CUSTOM_STATE, BASE_URI); query = authorizationUrl.getQuery(); parameters = TestUtils.parseQuery(query); assertFalse(parameters.containsKey("param1")); assertFalse(parameters.containsKey("param2")); } @Test public void getCredentials_noCredentials_returnsNull() throws IOException { UserAuthorizer authorizer = UserAuthorizer.newBuilder() .setClientId(CLIENT_ID) .setScopes(DUMMY_SCOPES) .setTokenStore(new MemoryTokensStorage()) .build(); UserCredentials credentials = authorizer.getCredentials(USER_ID); assertNull(credentials); } @Test public void testGetTokenResponseFromAuthCodeExchange_convertsCodeToTokens() throws IOException { MockTokenServerTransportFactory transportFactory = new MockTokenServerTransportFactory(); transportFactory.transport.addClient(CLIENT_ID_VALUE, CLIENT_SECRET); transportFactory.transport.addAuthorizationCode( CODE, REFRESH_TOKEN, ACCESS_TOKEN_VALUE, GRANTED_SCOPES_STRING, /* additionalParameters= */ null); UserAuthorizer authorizer = UserAuthorizer.newBuilder() .setClientId(CLIENT_ID) .setScopes(DUMMY_SCOPES) .setHttpTransportFactory(transportFactory) .build(); TokenResponseWithConfig response = authorizer.getTokenResponseFromAuthCodeExchange( CODE, BASE_URI, /* additionalParameters= */ null); assertEquals(REFRESH_TOKEN, response.getRefreshToken()); assertNotNull(response.getAccessToken()); assertEquals(ACCESS_TOKEN_VALUE, response.getAccessToken().getTokenValue()); assertEquals(GRANTED_SCOPES, response.getAccessToken().getScopes()); } @Test public void 
testGetTokenResponseFromAuthCodeExchange_workforceIdentityFederationClientAuthBasic() throws IOException { MockTokenServerTransportFactory transportFactory = new MockTokenServerTransportFactory(); transportFactory.transport.addClient(CLIENT_ID_VALUE, CLIENT_SECRET); transportFactory.transport.setClientAuthType(ClientAuthenticationType.CLIENT_SECRET_BASIC); transportFactory.transport.setPkceProvider(new DefaultPKCEProvider()); transportFactory.transport.addAuthorizationCode( CODE, REFRESH_TOKEN, ACCESS_TOKEN_VALUE, GRANTED_SCOPES_STRING, /* additionalParameters= */ null); UserAuthorizer authorizer = UserAuthorizer.newBuilder() .setClientId(CLIENT_ID) .setScopes(Collections.singletonList("https://www.googleapis.com/auth/cloud-platform")) .setTokenServerUri(WORKFORCE_IDENTITY_FEDERATION_TOKEN_SERVER_URI) .setUserAuthUri(WORKFORCE_IDENTITY_FEDERATION_AUTH_URI) .setClientAuthenticationType(ClientAuthenticationType.CLIENT_SECRET_BASIC) .setPKCEProvider(new DefaultPKCEProvider()) .setHttpTransportFactory(transportFactory) .build(); TokenResponseWithConfig response = authorizer.getTokenResponseFromAuthCodeExchange( CODE, BASE_URI, /* additionalParameters= */ null); assertEquals(REFRESH_TOKEN, response.getRefreshToken()); assertNotNull(response.getAccessToken()); assertEquals(ACCESS_TOKEN_VALUE, response.getAccessToken().getTokenValue()); Map<String, List<String>> headers = transportFactory.transport.getRequest().getHeaders(); List<String> authHeader = headers.get("authorization"); assertEquals( OAuth2Utils.generateBasicAuthHeader(CLIENT_ID_VALUE, CLIENT_SECRET), authHeader.iterator().next()); assertEquals(1, authHeader.size()); } @Test public void testGetTokenResponseFromAuthCodeExchange_workforceIdentityFederationNoClientAuth() throws IOException { MockTokenServerTransportFactory transportFactory = new MockTokenServerTransportFactory(); transportFactory.transport.addClient(CLIENT_ID_VALUE, CLIENT_SECRET); 
transportFactory.transport.setClientAuthType(ClientAuthenticationType.CLIENT_SECRET_POST); transportFactory.transport.addAuthorizationCode( CODE, REFRESH_TOKEN, ACCESS_TOKEN_VALUE, GRANTED_SCOPES_STRING, /* additionalParameters= */ null); UserAuthorizer authorizer = UserAuthorizer.newBuilder() .setClientId(CLIENT_ID) .setScopes(Collections.singletonList("https://www.googleapis.com/auth/cloud-platform")) .setTokenServerUri(WORKFORCE_IDENTITY_FEDERATION_TOKEN_SERVER_URI) .setUserAuthUri(WORKFORCE_IDENTITY_FEDERATION_AUTH_URI) .setClientAuthenticationType(ClientAuthenticationType.NONE) .setHttpTransportFactory(transportFactory) .build(); TokenResponseWithConfig response = authorizer.getTokenResponseFromAuthCodeExchange( CODE, BASE_URI, /* additionalParameters= */ null); assertEquals(REFRESH_TOKEN, response.getRefreshToken()); assertNotNull(response.getAccessToken()); assertEquals(ACCESS_TOKEN_VALUE, response.getAccessToken().getTokenValue()); Map<String, List<String>> headers = transportFactory.transport.getRequest().getHeaders(); assertNull(headers.get("authorization")); } @Test public void testGetTokenResponseFromAuthCodeExchange_missingAuthCode_throws() { UserAuthorizer authorizer = UserAuthorizer.newBuilder().setClientId(CLIENT_ID).setScopes(DUMMY_SCOPES).build(); assertThrows( NullPointerException.class, () -> { authorizer.getTokenResponseFromAuthCodeExchange( /* code= */ null, BASE_URI, /* additionalParameters= */ null); }); } @Test public void testGetTokenResponseFromAuthCodeExchange_missingAccessToken_throws() throws IOException { MockTokenServerTransportFactory transportFactory = new MockTokenServerTransportFactory(); transportFactory.transport.addClient(CLIENT_ID_VALUE, CLIENT_SECRET); // Missing access token. 
transportFactory.transport.addAuthorizationCode( CODE, REFRESH_TOKEN, /* accessToken= */ null, GRANTED_SCOPES_STRING, /* additionalParameters= */ null); UserAuthorizer authorizer = UserAuthorizer.newBuilder() .setClientId(CLIENT_ID) .setScopes(DUMMY_SCOPES) .setHttpTransportFactory(transportFactory) .build(); IOException e = assertThrows( IOException.class, () -> { authorizer.getTokenResponseFromAuthCodeExchange( CODE, BASE_URI, /* additionalParameters= */ null); }); assertTrue( e.getMessage() .contains("Error reading result of Token API:Expected value access_token not found.")); } @Test public void getCredentials_storedCredentials_returnsStored() throws IOException { TokenStore tokenStore = new MemoryTokensStorage(); UserCredentials initialCredentials = UserCredentials.newBuilder() .setClientId(CLIENT_ID_VALUE) .setClientSecret(CLIENT_SECRET) .setRefreshToken(REFRESH_TOKEN) .setAccessToken(ACCESS_TOKEN) .build(); UserAuthorizer authorizer = UserAuthorizer.newBuilder() .setClientId(CLIENT_ID) .setScopes(DUMMY_SCOPES) .setTokenStore(tokenStore) .build(); authorizer.storeCredentials(USER_ID, initialCredentials); UserCredentials credentials = authorizer.getCredentials(USER_ID); assertEquals(REFRESH_TOKEN, credentials.getRefreshToken()); assertEquals(ACCESS_TOKEN_VALUE, credentials.getAccessToken().getTokenValue()); assertEquals(EXPIRATION_TIME, credentials.getAccessToken().getExpirationTimeMillis()); assertEquals(GRANTED_SCOPES, credentials.getAccessToken().getScopes()); } @Test(expected = NullPointerException.class) public void getCredentials_nullUserId_throws() throws IOException { TokenStore tokenStore = new MemoryTokensStorage(); UserAuthorizer authorizer = UserAuthorizer.newBuilder() .setClientId(CLIENT_ID) .setScopes(DUMMY_SCOPES) .setTokenStore(tokenStore) .build(); authorizer.getCredentials(null); } @Test public void getCredentials_refreshedToken_stored() throws IOException { final String accessTokenValue1 = "1/MkSJoj1xsli0AccessToken_NKPY2"; final String 
accessTokenValue2 = "2/MkSJoj1xsli0AccessToken_NKPY2"; AccessToken accessToken1 = AccessToken.newBuilder() .setTokenValue(accessTokenValue1) .setExpirationTime(new Date(EXPIRATION_TIME)) .setScopes(GRANTED_SCOPES) .build(); MockTokenServerTransportFactory transportFactory = new MockTokenServerTransportFactory(); transportFactory.transport.addClient(CLIENT_ID_VALUE, CLIENT_SECRET); transportFactory.transport.addRefreshToken( REFRESH_TOKEN, accessTokenValue2, GRANTED_SCOPES_STRING); TokenStore tokenStore = new MemoryTokensStorage(); UserAuthorizer authorizer = UserAuthorizer.newBuilder() .setClientId(CLIENT_ID) .setScopes(DUMMY_SCOPES) .setTokenStore(tokenStore) .setHttpTransportFactory(transportFactory) .build(); UserCredentials originalCredentials = UserCredentials.newBuilder() .setClientId(CLIENT_ID_VALUE) .setClientSecret(CLIENT_SECRET) .setRefreshToken(REFRESH_TOKEN) .setAccessToken(accessToken1) .setHttpTransportFactory(transportFactory) .build(); authorizer.storeCredentials(USER_ID, originalCredentials); UserCredentials credentials1 = authorizer.getCredentials(USER_ID); assertEquals(REFRESH_TOKEN, credentials1.getRefreshToken()); assertEquals(accessTokenValue1, credentials1.getAccessToken().getTokenValue()); assertEquals(GRANTED_SCOPES, credentials1.getAccessToken().getScopes()); // Refresh the token to get update from token server credentials1.refresh(); assertEquals(REFRESH_TOKEN, credentials1.getRefreshToken()); assertEquals(accessTokenValue2, credentials1.getAccessToken().getTokenValue()); assertEquals(GRANTED_SCOPES, credentials1.getAccessToken().getScopes()); // Load a second credentials instance UserCredentials credentials2 = authorizer.getCredentials(USER_ID); // Verify that token refresh stored the updated tokens assertEquals(REFRESH_TOKEN, credentials2.getRefreshToken()); assertEquals(accessTokenValue2, credentials2.getAccessToken().getTokenValue()); assertEquals(GRANTED_SCOPES, credentials2.getAccessToken().getScopes()); } @Test public void 
getCredentials_refreshedToken_different_granted_scopes() throws IOException { final String accessTokenValue1 = "1/MkSJoj1xsli0AccessToken_NKPY2"; final String accessTokenValue2 = "2/MkSJoj1xsli0AccessToken_NKPY2"; final List<String> grantedRefreshScopes = Arrays.asList("scope3"); AccessToken accessToken1 = AccessToken.newBuilder() .setTokenValue(accessTokenValue1) .setExpirationTime(new Date(EXPIRATION_TIME)) .setScopes(GRANTED_SCOPES) .build(); MockTokenServerTransportFactory transportFactory = new MockTokenServerTransportFactory(); transportFactory.transport.addClient(CLIENT_ID_VALUE, CLIENT_SECRET); transportFactory.transport.addRefreshToken(REFRESH_TOKEN, accessTokenValue2, "scope3"); TokenStore tokenStore = new MemoryTokensStorage(); UserAuthorizer authorizer = UserAuthorizer.newBuilder() .setClientId(CLIENT_ID) .setScopes(DUMMY_SCOPES) .setTokenStore(tokenStore) .setHttpTransportFactory(transportFactory) .build(); UserCredentials originalCredentials = UserCredentials.newBuilder() .setClientId(CLIENT_ID_VALUE) .setClientSecret(CLIENT_SECRET) .setRefreshToken(REFRESH_TOKEN) .setAccessToken(accessToken1) .setHttpTransportFactory(transportFactory) .build(); authorizer.storeCredentials(USER_ID, originalCredentials); UserCredentials credentials1 = authorizer.getCredentials(USER_ID); assertEquals(REFRESH_TOKEN, credentials1.getRefreshToken()); assertEquals(accessTokenValue1, credentials1.getAccessToken().getTokenValue()); assertEquals(GRANTED_SCOPES, credentials1.getAccessToken().getScopes()); // Refresh the token to get update from token server credentials1.refresh(); assertEquals(REFRESH_TOKEN, credentials1.getRefreshToken()); assertEquals(accessTokenValue2, credentials1.getAccessToken().getTokenValue()); assertEquals(grantedRefreshScopes, credentials1.getAccessToken().getScopes()); // Load a second credentials instance UserCredentials credentials2 = authorizer.getCredentials(USER_ID); // Verify that token refresh stored the updated tokens 
assertEquals(REFRESH_TOKEN, credentials2.getRefreshToken()); assertEquals(accessTokenValue2, credentials2.getAccessToken().getTokenValue()); assertEquals(grantedRefreshScopes, credentials2.getAccessToken().getScopes()); } @Test public void getCredentialsFromCode_convertsCodeToTokens() throws IOException { MockTokenServerTransportFactory transportFactory = new MockTokenServerTransportFactory(); transportFactory.transport.addClient(CLIENT_ID_VALUE, CLIENT_SECRET); transportFactory.transport.addAuthorizationCode( CODE, REFRESH_TOKEN, ACCESS_TOKEN_VALUE, GRANTED_SCOPES_STRING, null); TokenStore tokenStore = new MemoryTokensStorage(); UserAuthorizer authorizer = UserAuthorizer.newBuilder() .setClientId(CLIENT_ID) .setScopes(DUMMY_SCOPES) .setTokenStore(tokenStore) .setHttpTransportFactory(transportFactory) .build(); UserCredentials credentials = authorizer.getCredentialsFromCode(CODE, BASE_URI); assertEquals(REFRESH_TOKEN, credentials.getRefreshToken()); assertEquals(ACCESS_TOKEN_VALUE, credentials.getAccessToken().getTokenValue()); assertEquals(GRANTED_SCOPES, credentials.getAccessToken().getScopes()); } @Test public void getCredentialsFromCode_additionalParameters() throws IOException { MockTokenServerTransportFactory transportFactory = new MockTokenServerTransportFactory(); transportFactory.transport.addClient(CLIENT_ID_VALUE, CLIENT_SECRET); Map<String, String> additionalParameters = new HashMap<String, String>(); additionalParameters.put("param1", "value1"); additionalParameters.put("param2", "value2"); String code2 = "code2"; String refreshToken2 = "refreshToken2"; String accessTokenValue2 = "accessTokenValue2"; transportFactory.transport.addAuthorizationCode( CODE, REFRESH_TOKEN, ACCESS_TOKEN_VALUE, GRANTED_SCOPES_STRING, null); transportFactory.transport.addAuthorizationCode( code2, refreshToken2, accessTokenValue2, GRANTED_SCOPES_STRING, additionalParameters); TokenStore tokenStore = new MemoryTokensStorage(); UserAuthorizer authorizer = 
UserAuthorizer.newBuilder() .setClientId(CLIENT_ID) .setScopes(DUMMY_SCOPES) .setTokenStore(tokenStore) .setHttpTransportFactory(transportFactory) .build(); // Verify that the additional parameters are not attached to the post body when not specified UserCredentials credentials = authorizer.getCredentialsFromCode(CODE, BASE_URI); assertEquals(REFRESH_TOKEN, credentials.getRefreshToken()); assertEquals(ACCESS_TOKEN_VALUE, credentials.getAccessToken().getTokenValue()); assertEquals(GRANTED_SCOPES, credentials.getAccessToken().getScopes()); // Verify that the additional parameters are attached to the post body when specified credentials = authorizer.getCredentialsFromCode(code2, BASE_URI, additionalParameters); assertEquals(refreshToken2, credentials.getRefreshToken()); assertEquals(accessTokenValue2, credentials.getAccessToken().getTokenValue()); assertEquals(GRANTED_SCOPES, credentials.getAccessToken().getScopes()); // Verify that the additional parameters from previous request are not attached to the post body credentials = authorizer.getCredentialsFromCode(CODE, BASE_URI); assertEquals(REFRESH_TOKEN, credentials.getRefreshToken()); assertEquals(ACCESS_TOKEN_VALUE, credentials.getAccessToken().getTokenValue()); assertEquals(GRANTED_SCOPES, credentials.getAccessToken().getScopes()); } @Test(expected = NullPointerException.class) public void getCredentialsFromCode_nullCode_throws() throws IOException { UserAuthorizer authorizer = UserAuthorizer.newBuilder() .setClientId(CLIENT_ID) .setScopes(DUMMY_SCOPES) .setTokenStore(new MemoryTokensStorage()) .build(); authorizer.getCredentialsFromCode(null, BASE_URI); } @Test public void getAndStoreCredentialsFromCode_getAndStoresCredentials() throws IOException { final String accessTokenValue1 = "1/MkSJoj1xsli0AccessToken_NKPY2"; final String accessTokenValue2 = "2/MkSJoj1xsli0AccessToken_NKPY2"; MockTokenServerTransportFactory transportFactory = new MockTokenServerTransportFactory(); 
transportFactory.transport.addClient(CLIENT_ID_VALUE, CLIENT_SECRET); transportFactory.transport.addAuthorizationCode( CODE, REFRESH_TOKEN, accessTokenValue1, GRANTED_SCOPES_STRING, null); TokenStore tokenStore = new MemoryTokensStorage(); UserAuthorizer authorizer = UserAuthorizer.newBuilder() .setClientId(CLIENT_ID) .setScopes(DUMMY_SCOPES) .setTokenStore(tokenStore) .setHttpTransportFactory(transportFactory) .build(); UserCredentials credentials1 = authorizer.getAndStoreCredentialsFromCode(USER_ID, CODE, BASE_URI); assertEquals(REFRESH_TOKEN, credentials1.getRefreshToken()); assertEquals(GRANTED_SCOPES, credentials1.getAccessToken().getScopes()); assertEquals(accessTokenValue1, credentials1.getAccessToken().getTokenValue()); // Refresh the token to get update from token server transportFactory.transport.addRefreshToken(REFRESH_TOKEN, accessTokenValue2); credentials1.refresh(); assertEquals(REFRESH_TOKEN, credentials1.getRefreshToken()); assertEquals(accessTokenValue2, credentials1.getAccessToken().getTokenValue()); // Load a second credentials instance UserCredentials credentials2 = authorizer.getCredentials(USER_ID); // Verify that token refresh stored the updated tokens assertEquals(REFRESH_TOKEN, credentials2.getRefreshToken()); assertEquals(GRANTED_SCOPES, credentials2.getAccessToken().getScopes()); assertEquals(accessTokenValue2, credentials2.getAccessToken().getTokenValue()); } @Test(expected = NullPointerException.class) public void getAndStoreCredentialsFromCode_nullCode_throws() throws IOException { UserAuthorizer authorizer = UserAuthorizer.newBuilder() .setClientId(CLIENT_ID) .setScopes(DUMMY_SCOPES) .setTokenStore(new MemoryTokensStorage()) .build(); authorizer.getAndStoreCredentialsFromCode(USER_ID, null, BASE_URI); } @Test(expected = NullPointerException.class) public void getAndStoreCredentialsFromCode_nullUserId_throws() throws IOException { UserAuthorizer authorizer = UserAuthorizer.newBuilder() .setClientId(CLIENT_ID) .setScopes(DUMMY_SCOPES) 
.setTokenStore(new MemoryTokensStorage()) .build(); authorizer.getAndStoreCredentialsFromCode(null, CODE, BASE_URI); } @Test public void revokeAuthorization_revokesAndClears() throws IOException { TokenStore tokenStore = new MemoryTokensStorage(); MockTokenServerTransportFactory transportFactory = new MockTokenServerTransportFactory(); transportFactory.transport.addClient(CLIENT_ID_VALUE, CLIENT_SECRET); transportFactory.transport.addRefreshToken( REFRESH_TOKEN, ACCESS_TOKEN_VALUE, GRANTED_SCOPES_STRING); UserCredentials initialCredentials = UserCredentials.newBuilder() .setClientId(CLIENT_ID_VALUE) .setClientSecret(CLIENT_SECRET) .setRefreshToken(REFRESH_TOKEN) .setAccessToken(ACCESS_TOKEN) .build(); UserAuthorizer authorizer = UserAuthorizer.newBuilder() .setClientId(CLIENT_ID) .setScopes(DUMMY_SCOPES) .setTokenStore(tokenStore) .setHttpTransportFactory(transportFactory) .build(); authorizer.storeCredentials(USER_ID, initialCredentials); UserCredentials credentials1 = authorizer.getCredentials(USER_ID); assertEquals(REFRESH_TOKEN, credentials1.getRefreshToken()); credentials1.refresh(); assertEquals(ACCESS_TOKEN_VALUE, credentials1.getAccessToken().getTokenValue()); assertEquals(GRANTED_SCOPES, credentials1.getAccessToken().getScopes()); authorizer.revokeAuthorization(USER_ID); try { credentials1.refresh(); fail("Credentials should not refresh after revoke."); } catch (IOException expected) { // Expected } UserCredentials credentials2 = authorizer.getCredentials(USER_ID); assertNull(credentials2); } @Test(expected = IllegalArgumentException.class) public void nullCodeVerifierPKCEProvider() { PKCEProvider pkce = new PKCEProvider() { @Override public String getCodeVerifier() { return null; } @Override public String getCodeChallengeMethod() { return "dummy string"; } @Override public String getCodeChallenge() { return "dummy string"; } }; UserAuthorizer authorizer = UserAuthorizer.newBuilder() .setClientId(CLIENT_ID) .setScopes(DUMMY_SCOPES) .setTokenStore(new 
MemoryTokensStorage()) .setPKCEProvider(pkce) .build(); } @Test(expected = IllegalArgumentException.class) public void nullCodeChallengePKCEProvider() { PKCEProvider pkce = new PKCEProvider() { @Override public String getCodeVerifier() { return "dummy string"; } @Override public String getCodeChallengeMethod() { return "dummy string"; } @Override public String getCodeChallenge() { return null; } }; UserAuthorizer authorizer = UserAuthorizer.newBuilder() .setClientId(CLIENT_ID) .setScopes(DUMMY_SCOPES) .setTokenStore(new MemoryTokensStorage()) .setPKCEProvider(pkce) .build(); } @Test(expected = IllegalArgumentException.class) public void nullCodeChallengeMethodPKCEProvider() { PKCEProvider pkce = new PKCEProvider() { @Override public String getCodeVerifier() { return "dummy string"; } @Override public String getCodeChallengeMethod() { return null; } @Override public String getCodeChallenge() { return "dummy string"; } }; UserAuthorizer.newBuilder() .setClientId(CLIENT_ID) .setScopes(DUMMY_SCOPES) .setTokenStore(new MemoryTokensStorage()) .setPKCEProvider(pkce) .build(); } @Test public void testTokenResponseWithConfig() { String clientId = "testClientId"; String clientSecret = "testClientSecret"; String refreshToken = "testRefreshToken"; AccessToken accessToken = new AccessToken("token", new Date()); URI tokenServerUri = URI.create("https://example.com/token"); HttpTransportFactory httpTransportFactory = new MockTokenServerTransportFactory(); TokenResponseWithConfig tokenResponse = TokenResponseWithConfig.newBuilder() .setClientId(clientId) .setClientSecret(clientSecret) .setRefreshToken(refreshToken) .setAccessToken(accessToken) .setTokenServerUri(tokenServerUri) .setHttpTransportFactory(httpTransportFactory) .build(); assertEquals(clientId, tokenResponse.getClientId()); assertEquals(clientSecret, tokenResponse.getClientSecret()); assertEquals(refreshToken, tokenResponse.getRefreshToken()); assertEquals(accessToken, tokenResponse.getAccessToken()); 
assertEquals(tokenServerUri, tokenResponse.getTokenServerUri()); assertEquals(httpTransportFactory, tokenResponse.getHttpTransportFactory()); } @Test public void testTokenResponseWithConfig_noRefreshToken() { String clientId = "testClientId"; String clientSecret = "testClientSecret"; AccessToken accessToken = new AccessToken("token", new Date()); URI tokenServerUri = URI.create("https://example.com/token"); HttpTransportFactory httpTransportFactory = new MockTokenServerTransportFactory(); TokenResponseWithConfig tokenResponse = TokenResponseWithConfig.newBuilder() .setClientId(clientId) .setClientSecret(clientSecret) .setAccessToken(accessToken) .setTokenServerUri(tokenServerUri) .setHttpTransportFactory(httpTransportFactory) .build(); assertEquals(clientId, tokenResponse.getClientId()); assertEquals(clientSecret, tokenResponse.getClientSecret()); assertEquals(accessToken, tokenResponse.getAccessToken()); assertEquals(tokenServerUri, tokenResponse.getTokenServerUri()); assertEquals(httpTransportFactory, tokenResponse.getHttpTransportFactory()); assertNull(tokenResponse.getRefreshToken()); } }
googleapis/google-cloud-java
36,744
java-datastream/proto-google-cloud-datastream-v1/src/main/java/com/google/cloud/datastream/v1/MongodbSourceConfig.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/datastream/v1/datastream_resources.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.datastream.v1; /** * * * <pre> * MongoDB source configuration. * </pre> * * Protobuf type {@code google.cloud.datastream.v1.MongodbSourceConfig} */ public final class MongodbSourceConfig extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.datastream.v1.MongodbSourceConfig) MongodbSourceConfigOrBuilder { private static final long serialVersionUID = 0L; // Use MongodbSourceConfig.newBuilder() to construct. 
private MongodbSourceConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private MongodbSourceConfig() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new MongodbSourceConfig(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.datastream.v1.DatastreamResourcesProto .internal_static_google_cloud_datastream_v1_MongodbSourceConfig_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.datastream.v1.DatastreamResourcesProto .internal_static_google_cloud_datastream_v1_MongodbSourceConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.datastream.v1.MongodbSourceConfig.class, com.google.cloud.datastream.v1.MongodbSourceConfig.Builder.class); } private int bitField0_; public static final int INCLUDE_OBJECTS_FIELD_NUMBER = 1; private com.google.cloud.datastream.v1.MongodbCluster includeObjects_; /** * * * <pre> * MongoDB collections to include in the stream. * </pre> * * <code>.google.cloud.datastream.v1.MongodbCluster include_objects = 1;</code> * * @return Whether the includeObjects field is set. */ @java.lang.Override public boolean hasIncludeObjects() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * MongoDB collections to include in the stream. * </pre> * * <code>.google.cloud.datastream.v1.MongodbCluster include_objects = 1;</code> * * @return The includeObjects. */ @java.lang.Override public com.google.cloud.datastream.v1.MongodbCluster getIncludeObjects() { return includeObjects_ == null ? com.google.cloud.datastream.v1.MongodbCluster.getDefaultInstance() : includeObjects_; } /** * * * <pre> * MongoDB collections to include in the stream. 
* </pre> * * <code>.google.cloud.datastream.v1.MongodbCluster include_objects = 1;</code> */ @java.lang.Override public com.google.cloud.datastream.v1.MongodbClusterOrBuilder getIncludeObjectsOrBuilder() { return includeObjects_ == null ? com.google.cloud.datastream.v1.MongodbCluster.getDefaultInstance() : includeObjects_; } public static final int EXCLUDE_OBJECTS_FIELD_NUMBER = 2; private com.google.cloud.datastream.v1.MongodbCluster excludeObjects_; /** * * * <pre> * MongoDB collections to exclude from the stream. * </pre> * * <code>.google.cloud.datastream.v1.MongodbCluster exclude_objects = 2;</code> * * @return Whether the excludeObjects field is set. */ @java.lang.Override public boolean hasExcludeObjects() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * MongoDB collections to exclude from the stream. * </pre> * * <code>.google.cloud.datastream.v1.MongodbCluster exclude_objects = 2;</code> * * @return The excludeObjects. */ @java.lang.Override public com.google.cloud.datastream.v1.MongodbCluster getExcludeObjects() { return excludeObjects_ == null ? com.google.cloud.datastream.v1.MongodbCluster.getDefaultInstance() : excludeObjects_; } /** * * * <pre> * MongoDB collections to exclude from the stream. * </pre> * * <code>.google.cloud.datastream.v1.MongodbCluster exclude_objects = 2;</code> */ @java.lang.Override public com.google.cloud.datastream.v1.MongodbClusterOrBuilder getExcludeObjectsOrBuilder() { return excludeObjects_ == null ? com.google.cloud.datastream.v1.MongodbCluster.getDefaultInstance() : excludeObjects_; } public static final int MAX_CONCURRENT_BACKFILL_TASKS_FIELD_NUMBER = 3; private int maxConcurrentBackfillTasks_ = 0; /** * * * <pre> * Optional. Maximum number of concurrent backfill tasks. The number should be * non-negative and less than or equal to 50. 
If not set (or set to 0), the * system's default value is used * </pre> * * <code>int32 max_concurrent_backfill_tasks = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The maxConcurrentBackfillTasks. */ @java.lang.Override public int getMaxConcurrentBackfillTasks() { return maxConcurrentBackfillTasks_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getIncludeObjects()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getExcludeObjects()); } if (maxConcurrentBackfillTasks_ != 0) { output.writeInt32(3, maxConcurrentBackfillTasks_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getIncludeObjects()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getExcludeObjects()); } if (maxConcurrentBackfillTasks_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, maxConcurrentBackfillTasks_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.datastream.v1.MongodbSourceConfig)) { return super.equals(obj); } com.google.cloud.datastream.v1.MongodbSourceConfig other = (com.google.cloud.datastream.v1.MongodbSourceConfig) obj; if (hasIncludeObjects() != other.hasIncludeObjects()) return 
false; if (hasIncludeObjects()) { if (!getIncludeObjects().equals(other.getIncludeObjects())) return false; } if (hasExcludeObjects() != other.hasExcludeObjects()) return false; if (hasExcludeObjects()) { if (!getExcludeObjects().equals(other.getExcludeObjects())) return false; } if (getMaxConcurrentBackfillTasks() != other.getMaxConcurrentBackfillTasks()) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasIncludeObjects()) { hash = (37 * hash) + INCLUDE_OBJECTS_FIELD_NUMBER; hash = (53 * hash) + getIncludeObjects().hashCode(); } if (hasExcludeObjects()) { hash = (37 * hash) + EXCLUDE_OBJECTS_FIELD_NUMBER; hash = (53 * hash) + getExcludeObjects().hashCode(); } hash = (37 * hash) + MAX_CONCURRENT_BACKFILL_TASKS_FIELD_NUMBER; hash = (53 * hash) + getMaxConcurrentBackfillTasks(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.datastream.v1.MongodbSourceConfig parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datastream.v1.MongodbSourceConfig parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datastream.v1.MongodbSourceConfig parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datastream.v1.MongodbSourceConfig parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datastream.v1.MongodbSourceConfig parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datastream.v1.MongodbSourceConfig parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datastream.v1.MongodbSourceConfig parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.datastream.v1.MongodbSourceConfig parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.datastream.v1.MongodbSourceConfig parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.datastream.v1.MongodbSourceConfig parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.datastream.v1.MongodbSourceConfig parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.datastream.v1.MongodbSourceConfig parseFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.datastream.v1.MongodbSourceConfig prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * MongoDB source configuration. * </pre> * * Protobuf type {@code google.cloud.datastream.v1.MongodbSourceConfig} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.datastream.v1.MongodbSourceConfig) com.google.cloud.datastream.v1.MongodbSourceConfigOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.datastream.v1.DatastreamResourcesProto .internal_static_google_cloud_datastream_v1_MongodbSourceConfig_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.datastream.v1.DatastreamResourcesProto .internal_static_google_cloud_datastream_v1_MongodbSourceConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.datastream.v1.MongodbSourceConfig.class, com.google.cloud.datastream.v1.MongodbSourceConfig.Builder.class); } // Construct using com.google.cloud.datastream.v1.MongodbSourceConfig.newBuilder() private Builder() { 
maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getIncludeObjectsFieldBuilder(); getExcludeObjectsFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; includeObjects_ = null; if (includeObjectsBuilder_ != null) { includeObjectsBuilder_.dispose(); includeObjectsBuilder_ = null; } excludeObjects_ = null; if (excludeObjectsBuilder_ != null) { excludeObjectsBuilder_.dispose(); excludeObjectsBuilder_ = null; } maxConcurrentBackfillTasks_ = 0; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.datastream.v1.DatastreamResourcesProto .internal_static_google_cloud_datastream_v1_MongodbSourceConfig_descriptor; } @java.lang.Override public com.google.cloud.datastream.v1.MongodbSourceConfig getDefaultInstanceForType() { return com.google.cloud.datastream.v1.MongodbSourceConfig.getDefaultInstance(); } @java.lang.Override public com.google.cloud.datastream.v1.MongodbSourceConfig build() { com.google.cloud.datastream.v1.MongodbSourceConfig result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.datastream.v1.MongodbSourceConfig buildPartial() { com.google.cloud.datastream.v1.MongodbSourceConfig result = new com.google.cloud.datastream.v1.MongodbSourceConfig(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.datastream.v1.MongodbSourceConfig result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.includeObjects_ = includeObjectsBuilder_ == null ? 
includeObjects_ : includeObjectsBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.excludeObjects_ = excludeObjectsBuilder_ == null ? excludeObjects_ : excludeObjectsBuilder_.build(); to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.maxConcurrentBackfillTasks_ = maxConcurrentBackfillTasks_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.datastream.v1.MongodbSourceConfig) { return mergeFrom((com.google.cloud.datastream.v1.MongodbSourceConfig) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.datastream.v1.MongodbSourceConfig other) { if (other == com.google.cloud.datastream.v1.MongodbSourceConfig.getDefaultInstance()) return this; if (other.hasIncludeObjects()) { mergeIncludeObjects(other.getIncludeObjects()); } if (other.hasExcludeObjects()) { mergeExcludeObjects(other.getExcludeObjects()); } if (other.getMaxConcurrentBackfillTasks() != 0) { 
setMaxConcurrentBackfillTasks(other.getMaxConcurrentBackfillTasks()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getIncludeObjectsFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getExcludeObjectsFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 24: { maxConcurrentBackfillTasks_ = input.readInt32(); bitField0_ |= 0x00000004; break; } // case 24 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.datastream.v1.MongodbCluster includeObjects_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.datastream.v1.MongodbCluster, com.google.cloud.datastream.v1.MongodbCluster.Builder, com.google.cloud.datastream.v1.MongodbClusterOrBuilder> includeObjectsBuilder_; /** * * * <pre> * MongoDB collections to include in the stream. * </pre> * * <code>.google.cloud.datastream.v1.MongodbCluster include_objects = 1;</code> * * @return Whether the includeObjects field is set. */ public boolean hasIncludeObjects() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * MongoDB collections to include in the stream. 
* </pre> * * <code>.google.cloud.datastream.v1.MongodbCluster include_objects = 1;</code> * * @return The includeObjects. */ public com.google.cloud.datastream.v1.MongodbCluster getIncludeObjects() { if (includeObjectsBuilder_ == null) { return includeObjects_ == null ? com.google.cloud.datastream.v1.MongodbCluster.getDefaultInstance() : includeObjects_; } else { return includeObjectsBuilder_.getMessage(); } } /** * * * <pre> * MongoDB collections to include in the stream. * </pre> * * <code>.google.cloud.datastream.v1.MongodbCluster include_objects = 1;</code> */ public Builder setIncludeObjects(com.google.cloud.datastream.v1.MongodbCluster value) { if (includeObjectsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } includeObjects_ = value; } else { includeObjectsBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * MongoDB collections to include in the stream. * </pre> * * <code>.google.cloud.datastream.v1.MongodbCluster include_objects = 1;</code> */ public Builder setIncludeObjects( com.google.cloud.datastream.v1.MongodbCluster.Builder builderForValue) { if (includeObjectsBuilder_ == null) { includeObjects_ = builderForValue.build(); } else { includeObjectsBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * MongoDB collections to include in the stream. 
* </pre> * * <code>.google.cloud.datastream.v1.MongodbCluster include_objects = 1;</code> */ public Builder mergeIncludeObjects(com.google.cloud.datastream.v1.MongodbCluster value) { if (includeObjectsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && includeObjects_ != null && includeObjects_ != com.google.cloud.datastream.v1.MongodbCluster.getDefaultInstance()) { getIncludeObjectsBuilder().mergeFrom(value); } else { includeObjects_ = value; } } else { includeObjectsBuilder_.mergeFrom(value); } if (includeObjects_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * MongoDB collections to include in the stream. * </pre> * * <code>.google.cloud.datastream.v1.MongodbCluster include_objects = 1;</code> */ public Builder clearIncludeObjects() { bitField0_ = (bitField0_ & ~0x00000001); includeObjects_ = null; if (includeObjectsBuilder_ != null) { includeObjectsBuilder_.dispose(); includeObjectsBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * MongoDB collections to include in the stream. * </pre> * * <code>.google.cloud.datastream.v1.MongodbCluster include_objects = 1;</code> */ public com.google.cloud.datastream.v1.MongodbCluster.Builder getIncludeObjectsBuilder() { bitField0_ |= 0x00000001; onChanged(); return getIncludeObjectsFieldBuilder().getBuilder(); } /** * * * <pre> * MongoDB collections to include in the stream. * </pre> * * <code>.google.cloud.datastream.v1.MongodbCluster include_objects = 1;</code> */ public com.google.cloud.datastream.v1.MongodbClusterOrBuilder getIncludeObjectsOrBuilder() { if (includeObjectsBuilder_ != null) { return includeObjectsBuilder_.getMessageOrBuilder(); } else { return includeObjects_ == null ? com.google.cloud.datastream.v1.MongodbCluster.getDefaultInstance() : includeObjects_; } } /** * * * <pre> * MongoDB collections to include in the stream. 
* </pre> * * <code>.google.cloud.datastream.v1.MongodbCluster include_objects = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.datastream.v1.MongodbCluster, com.google.cloud.datastream.v1.MongodbCluster.Builder, com.google.cloud.datastream.v1.MongodbClusterOrBuilder> getIncludeObjectsFieldBuilder() { if (includeObjectsBuilder_ == null) { includeObjectsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.datastream.v1.MongodbCluster, com.google.cloud.datastream.v1.MongodbCluster.Builder, com.google.cloud.datastream.v1.MongodbClusterOrBuilder>( getIncludeObjects(), getParentForChildren(), isClean()); includeObjects_ = null; } return includeObjectsBuilder_; } private com.google.cloud.datastream.v1.MongodbCluster excludeObjects_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.datastream.v1.MongodbCluster, com.google.cloud.datastream.v1.MongodbCluster.Builder, com.google.cloud.datastream.v1.MongodbClusterOrBuilder> excludeObjectsBuilder_; /** * * * <pre> * MongoDB collections to exclude from the stream. * </pre> * * <code>.google.cloud.datastream.v1.MongodbCluster exclude_objects = 2;</code> * * @return Whether the excludeObjects field is set. */ public boolean hasExcludeObjects() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * MongoDB collections to exclude from the stream. * </pre> * * <code>.google.cloud.datastream.v1.MongodbCluster exclude_objects = 2;</code> * * @return The excludeObjects. */ public com.google.cloud.datastream.v1.MongodbCluster getExcludeObjects() { if (excludeObjectsBuilder_ == null) { return excludeObjects_ == null ? com.google.cloud.datastream.v1.MongodbCluster.getDefaultInstance() : excludeObjects_; } else { return excludeObjectsBuilder_.getMessage(); } } /** * * * <pre> * MongoDB collections to exclude from the stream. 
* </pre> * * <code>.google.cloud.datastream.v1.MongodbCluster exclude_objects = 2;</code> */ public Builder setExcludeObjects(com.google.cloud.datastream.v1.MongodbCluster value) { if (excludeObjectsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } excludeObjects_ = value; } else { excludeObjectsBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * MongoDB collections to exclude from the stream. * </pre> * * <code>.google.cloud.datastream.v1.MongodbCluster exclude_objects = 2;</code> */ public Builder setExcludeObjects( com.google.cloud.datastream.v1.MongodbCluster.Builder builderForValue) { if (excludeObjectsBuilder_ == null) { excludeObjects_ = builderForValue.build(); } else { excludeObjectsBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * MongoDB collections to exclude from the stream. * </pre> * * <code>.google.cloud.datastream.v1.MongodbCluster exclude_objects = 2;</code> */ public Builder mergeExcludeObjects(com.google.cloud.datastream.v1.MongodbCluster value) { if (excludeObjectsBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && excludeObjects_ != null && excludeObjects_ != com.google.cloud.datastream.v1.MongodbCluster.getDefaultInstance()) { getExcludeObjectsBuilder().mergeFrom(value); } else { excludeObjects_ = value; } } else { excludeObjectsBuilder_.mergeFrom(value); } if (excludeObjects_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * MongoDB collections to exclude from the stream. 
* </pre> * * <code>.google.cloud.datastream.v1.MongodbCluster exclude_objects = 2;</code> */ public Builder clearExcludeObjects() { bitField0_ = (bitField0_ & ~0x00000002); excludeObjects_ = null; if (excludeObjectsBuilder_ != null) { excludeObjectsBuilder_.dispose(); excludeObjectsBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * MongoDB collections to exclude from the stream. * </pre> * * <code>.google.cloud.datastream.v1.MongodbCluster exclude_objects = 2;</code> */ public com.google.cloud.datastream.v1.MongodbCluster.Builder getExcludeObjectsBuilder() { bitField0_ |= 0x00000002; onChanged(); return getExcludeObjectsFieldBuilder().getBuilder(); } /** * * * <pre> * MongoDB collections to exclude from the stream. * </pre> * * <code>.google.cloud.datastream.v1.MongodbCluster exclude_objects = 2;</code> */ public com.google.cloud.datastream.v1.MongodbClusterOrBuilder getExcludeObjectsOrBuilder() { if (excludeObjectsBuilder_ != null) { return excludeObjectsBuilder_.getMessageOrBuilder(); } else { return excludeObjects_ == null ? com.google.cloud.datastream.v1.MongodbCluster.getDefaultInstance() : excludeObjects_; } } /** * * * <pre> * MongoDB collections to exclude from the stream. 
* </pre> * * <code>.google.cloud.datastream.v1.MongodbCluster exclude_objects = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.datastream.v1.MongodbCluster, com.google.cloud.datastream.v1.MongodbCluster.Builder, com.google.cloud.datastream.v1.MongodbClusterOrBuilder> getExcludeObjectsFieldBuilder() { if (excludeObjectsBuilder_ == null) { excludeObjectsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.datastream.v1.MongodbCluster, com.google.cloud.datastream.v1.MongodbCluster.Builder, com.google.cloud.datastream.v1.MongodbClusterOrBuilder>( getExcludeObjects(), getParentForChildren(), isClean()); excludeObjects_ = null; } return excludeObjectsBuilder_; } private int maxConcurrentBackfillTasks_; /** * * * <pre> * Optional. Maximum number of concurrent backfill tasks. The number should be * non-negative and less than or equal to 50. If not set (or set to 0), the * system's default value is used * </pre> * * <code>int32 max_concurrent_backfill_tasks = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The maxConcurrentBackfillTasks. */ @java.lang.Override public int getMaxConcurrentBackfillTasks() { return maxConcurrentBackfillTasks_; } /** * * * <pre> * Optional. Maximum number of concurrent backfill tasks. The number should be * non-negative and less than or equal to 50. If not set (or set to 0), the * system's default value is used * </pre> * * <code>int32 max_concurrent_backfill_tasks = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @param value The maxConcurrentBackfillTasks to set. * @return This builder for chaining. */ public Builder setMaxConcurrentBackfillTasks(int value) { maxConcurrentBackfillTasks_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. Maximum number of concurrent backfill tasks. The number should be * non-negative and less than or equal to 50. 
If not set (or set to 0), the * system's default value is used * </pre> * * <code>int32 max_concurrent_backfill_tasks = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return This builder for chaining. */ public Builder clearMaxConcurrentBackfillTasks() { bitField0_ = (bitField0_ & ~0x00000004); maxConcurrentBackfillTasks_ = 0; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.datastream.v1.MongodbSourceConfig) } // @@protoc_insertion_point(class_scope:google.cloud.datastream.v1.MongodbSourceConfig) private static final com.google.cloud.datastream.v1.MongodbSourceConfig DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.datastream.v1.MongodbSourceConfig(); } public static com.google.cloud.datastream.v1.MongodbSourceConfig getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<MongodbSourceConfig> PARSER = new com.google.protobuf.AbstractParser<MongodbSourceConfig>() { @java.lang.Override public MongodbSourceConfig parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new 
com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<MongodbSourceConfig> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<MongodbSourceConfig> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.datastream.v1.MongodbSourceConfig getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/hive
36,811
service-rpc/src/gen/thrift/gen-javabean/org/apache/hive/service/rpc/thrift/TProgressUpdateResp.java
/** * Autogenerated by Thrift Compiler (0.16.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hive.service.rpc.thrift; @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"}) @javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.16.0)") @org.apache.hadoop.hive.common.classification.InterfaceAudience.Public @org.apache.hadoop.hive.common.classification.InterfaceStability.Stable public class TProgressUpdateResp implements org.apache.thrift.TBase<TProgressUpdateResp, TProgressUpdateResp._Fields>, java.io.Serializable, Cloneable, Comparable<TProgressUpdateResp> { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TProgressUpdateResp"); private static final org.apache.thrift.protocol.TField HEADER_NAMES_FIELD_DESC = new org.apache.thrift.protocol.TField("headerNames", org.apache.thrift.protocol.TType.LIST, (short)1); private static final org.apache.thrift.protocol.TField ROWS_FIELD_DESC = new org.apache.thrift.protocol.TField("rows", org.apache.thrift.protocol.TType.LIST, (short)2); private static final org.apache.thrift.protocol.TField PROGRESSED_PERCENTAGE_FIELD_DESC = new org.apache.thrift.protocol.TField("progressedPercentage", org.apache.thrift.protocol.TType.DOUBLE, (short)3); private static final org.apache.thrift.protocol.TField STATUS_FIELD_DESC = new org.apache.thrift.protocol.TField("status", org.apache.thrift.protocol.TType.I32, (short)4); private static final org.apache.thrift.protocol.TField FOOTER_SUMMARY_FIELD_DESC = new org.apache.thrift.protocol.TField("footerSummary", org.apache.thrift.protocol.TType.STRING, (short)5); private static final org.apache.thrift.protocol.TField START_TIME_FIELD_DESC = new org.apache.thrift.protocol.TField("startTime", org.apache.thrift.protocol.TType.I64, (short)6); private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new 
TProgressUpdateRespStandardSchemeFactory(); private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new TProgressUpdateRespTupleSchemeFactory(); private @org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> headerNames; // required private @org.apache.thrift.annotation.Nullable java.util.List<java.util.List<java.lang.String>> rows; // required private double progressedPercentage; // required private @org.apache.thrift.annotation.Nullable TJobExecutionStatus status; // required private @org.apache.thrift.annotation.Nullable java.lang.String footerSummary; // required private long startTime; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { HEADER_NAMES((short)1, "headerNames"), ROWS((short)2, "rows"), PROGRESSED_PERCENTAGE((short)3, "progressedPercentage"), /** * * @see TJobExecutionStatus */ STATUS((short)4, "status"), FOOTER_SUMMARY((short)5, "footerSummary"), START_TIME((short)6, "startTime"); private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>(); static { for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if its not found. */ @org.apache.thrift.annotation.Nullable public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 1: // HEADER_NAMES return HEADER_NAMES; case 2: // ROWS return ROWS; case 3: // PROGRESSED_PERCENTAGE return PROGRESSED_PERCENTAGE; case 4: // STATUS return STATUS; case 5: // FOOTER_SUMMARY return FOOTER_SUMMARY; case 6: // START_TIME return START_TIME; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. 
*/ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if its not found. */ @org.apache.thrift.annotation.Nullable public static _Fields findByName(java.lang.String name) { return byName.get(name); } private final short _thriftId; private final java.lang.String _fieldName; _Fields(short thriftId, java.lang.String fieldName) { _thriftId = thriftId; _fieldName = fieldName; } public short getThriftFieldId() { return _thriftId; } public java.lang.String getFieldName() { return _fieldName; } } // isset id assignments private static final int __PROGRESSEDPERCENTAGE_ISSET_ID = 0; private static final int __STARTTIME_ISSET_ID = 1; private byte __isset_bitfield = 0; public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.HEADER_NAMES, new org.apache.thrift.meta_data.FieldMetaData("headerNames", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)))); tmpMap.put(_Fields.ROWS, new org.apache.thrift.meta_data.FieldMetaData("rows", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))))); tmpMap.put(_Fields.PROGRESSED_PERCENTAGE, new org.apache.thrift.meta_data.FieldMetaData("progressedPercentage", 
org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.DOUBLE))); tmpMap.put(_Fields.STATUS, new org.apache.thrift.meta_data.FieldMetaData("status", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, TJobExecutionStatus.class))); tmpMap.put(_Fields.FOOTER_SUMMARY, new org.apache.thrift.meta_data.FieldMetaData("footerSummary", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.START_TIME, new org.apache.thrift.meta_data.FieldMetaData("startTime", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64))); metaDataMap = java.util.Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(TProgressUpdateResp.class, metaDataMap); } public TProgressUpdateResp() { } public TProgressUpdateResp( java.util.List<java.lang.String> headerNames, java.util.List<java.util.List<java.lang.String>> rows, double progressedPercentage, TJobExecutionStatus status, java.lang.String footerSummary, long startTime) { this(); this.headerNames = headerNames; this.rows = rows; this.progressedPercentage = progressedPercentage; setProgressedPercentageIsSet(true); this.status = status; this.footerSummary = footerSummary; this.startTime = startTime; setStartTimeIsSet(true); } /** * Performs a deep copy on <i>other</i>. 
*/ public TProgressUpdateResp(TProgressUpdateResp other) { __isset_bitfield = other.__isset_bitfield; if (other.isSetHeaderNames()) { java.util.List<java.lang.String> __this__headerNames = new java.util.ArrayList<java.lang.String>(other.headerNames); this.headerNames = __this__headerNames; } if (other.isSetRows()) { java.util.List<java.util.List<java.lang.String>> __this__rows = new java.util.ArrayList<java.util.List<java.lang.String>>(other.rows.size()); for (java.util.List<java.lang.String> other_element : other.rows) { java.util.List<java.lang.String> __this__rows_copy = new java.util.ArrayList<java.lang.String>(other_element); __this__rows.add(__this__rows_copy); } this.rows = __this__rows; } this.progressedPercentage = other.progressedPercentage; if (other.isSetStatus()) { this.status = other.status; } if (other.isSetFooterSummary()) { this.footerSummary = other.footerSummary; } this.startTime = other.startTime; } public TProgressUpdateResp deepCopy() { return new TProgressUpdateResp(this); } @Override public void clear() { this.headerNames = null; this.rows = null; setProgressedPercentageIsSet(false); this.progressedPercentage = 0.0; this.status = null; this.footerSummary = null; setStartTimeIsSet(false); this.startTime = 0; } public int getHeaderNamesSize() { return (this.headerNames == null) ? 0 : this.headerNames.size(); } @org.apache.thrift.annotation.Nullable public java.util.Iterator<java.lang.String> getHeaderNamesIterator() { return (this.headerNames == null) ? 
null : this.headerNames.iterator(); } public void addToHeaderNames(java.lang.String elem) { if (this.headerNames == null) { this.headerNames = new java.util.ArrayList<java.lang.String>(); } this.headerNames.add(elem); } @org.apache.thrift.annotation.Nullable public java.util.List<java.lang.String> getHeaderNames() { return this.headerNames; } public void setHeaderNames(@org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> headerNames) { this.headerNames = headerNames; } public void unsetHeaderNames() { this.headerNames = null; } /** Returns true if field headerNames is set (has been assigned a value) and false otherwise */ public boolean isSetHeaderNames() { return this.headerNames != null; } public void setHeaderNamesIsSet(boolean value) { if (!value) { this.headerNames = null; } } public int getRowsSize() { return (this.rows == null) ? 0 : this.rows.size(); } @org.apache.thrift.annotation.Nullable public java.util.Iterator<java.util.List<java.lang.String>> getRowsIterator() { return (this.rows == null) ? 
null : this.rows.iterator(); } public void addToRows(java.util.List<java.lang.String> elem) { if (this.rows == null) { this.rows = new java.util.ArrayList<java.util.List<java.lang.String>>(); } this.rows.add(elem); } @org.apache.thrift.annotation.Nullable public java.util.List<java.util.List<java.lang.String>> getRows() { return this.rows; } public void setRows(@org.apache.thrift.annotation.Nullable java.util.List<java.util.List<java.lang.String>> rows) { this.rows = rows; } public void unsetRows() { this.rows = null; } /** Returns true if field rows is set (has been assigned a value) and false otherwise */ public boolean isSetRows() { return this.rows != null; } public void setRowsIsSet(boolean value) { if (!value) { this.rows = null; } } public double getProgressedPercentage() { return this.progressedPercentage; } public void setProgressedPercentage(double progressedPercentage) { this.progressedPercentage = progressedPercentage; setProgressedPercentageIsSet(true); } public void unsetProgressedPercentage() { __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __PROGRESSEDPERCENTAGE_ISSET_ID); } /** Returns true if field progressedPercentage is set (has been assigned a value) and false otherwise */ public boolean isSetProgressedPercentage() { return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __PROGRESSEDPERCENTAGE_ISSET_ID); } public void setProgressedPercentageIsSet(boolean value) { __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __PROGRESSEDPERCENTAGE_ISSET_ID, value); } /** * * @see TJobExecutionStatus */ @org.apache.thrift.annotation.Nullable public TJobExecutionStatus getStatus() { return this.status; } /** * * @see TJobExecutionStatus */ public void setStatus(@org.apache.thrift.annotation.Nullable TJobExecutionStatus status) { this.status = status; } public void unsetStatus() { this.status = null; } /** Returns true if field status is set (has been assigned a value) and false otherwise */ 
public boolean isSetStatus() { return this.status != null; } public void setStatusIsSet(boolean value) { if (!value) { this.status = null; } } @org.apache.thrift.annotation.Nullable public java.lang.String getFooterSummary() { return this.footerSummary; } public void setFooterSummary(@org.apache.thrift.annotation.Nullable java.lang.String footerSummary) { this.footerSummary = footerSummary; } public void unsetFooterSummary() { this.footerSummary = null; } /** Returns true if field footerSummary is set (has been assigned a value) and false otherwise */ public boolean isSetFooterSummary() { return this.footerSummary != null; } public void setFooterSummaryIsSet(boolean value) { if (!value) { this.footerSummary = null; } } public long getStartTime() { return this.startTime; } public void setStartTime(long startTime) { this.startTime = startTime; setStartTimeIsSet(true); } public void unsetStartTime() { __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __STARTTIME_ISSET_ID); } /** Returns true if field startTime is set (has been assigned a value) and false otherwise */ public boolean isSetStartTime() { return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __STARTTIME_ISSET_ID); } public void setStartTimeIsSet(boolean value) { __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __STARTTIME_ISSET_ID, value); } public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) { switch (field) { case HEADER_NAMES: if (value == null) { unsetHeaderNames(); } else { setHeaderNames((java.util.List<java.lang.String>)value); } break; case ROWS: if (value == null) { unsetRows(); } else { setRows((java.util.List<java.util.List<java.lang.String>>)value); } break; case PROGRESSED_PERCENTAGE: if (value == null) { unsetProgressedPercentage(); } else { setProgressedPercentage((java.lang.Double)value); } break; case STATUS: if (value == null) { unsetStatus(); } else { 
setStatus((TJobExecutionStatus)value); } break; case FOOTER_SUMMARY: if (value == null) { unsetFooterSummary(); } else { setFooterSummary((java.lang.String)value); } break; case START_TIME: if (value == null) { unsetStartTime(); } else { setStartTime((java.lang.Long)value); } break; } } @org.apache.thrift.annotation.Nullable public java.lang.Object getFieldValue(_Fields field) { switch (field) { case HEADER_NAMES: return getHeaderNames(); case ROWS: return getRows(); case PROGRESSED_PERCENTAGE: return getProgressedPercentage(); case STATUS: return getStatus(); case FOOTER_SUMMARY: return getFooterSummary(); case START_TIME: return getStartTime(); } throw new java.lang.IllegalStateException(); } /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ public boolean isSet(_Fields field) { if (field == null) { throw new java.lang.IllegalArgumentException(); } switch (field) { case HEADER_NAMES: return isSetHeaderNames(); case ROWS: return isSetRows(); case PROGRESSED_PERCENTAGE: return isSetProgressedPercentage(); case STATUS: return isSetStatus(); case FOOTER_SUMMARY: return isSetFooterSummary(); case START_TIME: return isSetStartTime(); } throw new java.lang.IllegalStateException(); } @Override public boolean equals(java.lang.Object that) { if (that instanceof TProgressUpdateResp) return this.equals((TProgressUpdateResp)that); return false; } public boolean equals(TProgressUpdateResp that) { if (that == null) return false; if (this == that) return true; boolean this_present_headerNames = true && this.isSetHeaderNames(); boolean that_present_headerNames = true && that.isSetHeaderNames(); if (this_present_headerNames || that_present_headerNames) { if (!(this_present_headerNames && that_present_headerNames)) return false; if (!this.headerNames.equals(that.headerNames)) return false; } boolean this_present_rows = true && this.isSetRows(); boolean that_present_rows = true && that.isSetRows(); if (this_present_rows || 
that_present_rows) { if (!(this_present_rows && that_present_rows)) return false; if (!this.rows.equals(that.rows)) return false; } boolean this_present_progressedPercentage = true; boolean that_present_progressedPercentage = true; if (this_present_progressedPercentage || that_present_progressedPercentage) { if (!(this_present_progressedPercentage && that_present_progressedPercentage)) return false; if (this.progressedPercentage != that.progressedPercentage) return false; } boolean this_present_status = true && this.isSetStatus(); boolean that_present_status = true && that.isSetStatus(); if (this_present_status || that_present_status) { if (!(this_present_status && that_present_status)) return false; if (!this.status.equals(that.status)) return false; } boolean this_present_footerSummary = true && this.isSetFooterSummary(); boolean that_present_footerSummary = true && that.isSetFooterSummary(); if (this_present_footerSummary || that_present_footerSummary) { if (!(this_present_footerSummary && that_present_footerSummary)) return false; if (!this.footerSummary.equals(that.footerSummary)) return false; } boolean this_present_startTime = true; boolean that_present_startTime = true; if (this_present_startTime || that_present_startTime) { if (!(this_present_startTime && that_present_startTime)) return false; if (this.startTime != that.startTime) return false; } return true; } @Override public int hashCode() { int hashCode = 1; hashCode = hashCode * 8191 + ((isSetHeaderNames()) ? 131071 : 524287); if (isSetHeaderNames()) hashCode = hashCode * 8191 + headerNames.hashCode(); hashCode = hashCode * 8191 + ((isSetRows()) ? 131071 : 524287); if (isSetRows()) hashCode = hashCode * 8191 + rows.hashCode(); hashCode = hashCode * 8191 + org.apache.thrift.TBaseHelper.hashCode(progressedPercentage); hashCode = hashCode * 8191 + ((isSetStatus()) ? 131071 : 524287); if (isSetStatus()) hashCode = hashCode * 8191 + status.getValue(); hashCode = hashCode * 8191 + ((isSetFooterSummary()) ? 
131071 : 524287); if (isSetFooterSummary()) hashCode = hashCode * 8191 + footerSummary.hashCode(); hashCode = hashCode * 8191 + org.apache.thrift.TBaseHelper.hashCode(startTime); return hashCode; } @Override public int compareTo(TProgressUpdateResp other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; lastComparison = java.lang.Boolean.compare(isSetHeaderNames(), other.isSetHeaderNames()); if (lastComparison != 0) { return lastComparison; } if (isSetHeaderNames()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.headerNames, other.headerNames); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetRows(), other.isSetRows()); if (lastComparison != 0) { return lastComparison; } if (isSetRows()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.rows, other.rows); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetProgressedPercentage(), other.isSetProgressedPercentage()); if (lastComparison != 0) { return lastComparison; } if (isSetProgressedPercentage()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.progressedPercentage, other.progressedPercentage); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetStatus(), other.isSetStatus()); if (lastComparison != 0) { return lastComparison; } if (isSetStatus()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.status, other.status); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetFooterSummary(), other.isSetFooterSummary()); if (lastComparison != 0) { return lastComparison; } if (isSetFooterSummary()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.footerSummary, other.footerSummary); if (lastComparison != 0) { return lastComparison; } } lastComparison = 
java.lang.Boolean.compare(isSetStartTime(), other.isSetStartTime()); if (lastComparison != 0) { return lastComparison; } if (isSetStartTime()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.startTime, other.startTime); if (lastComparison != 0) { return lastComparison; } } return 0; } @org.apache.thrift.annotation.Nullable public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { scheme(iprot).read(iprot, this); } public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { scheme(oprot).write(oprot, this); } @Override public java.lang.String toString() { java.lang.StringBuilder sb = new java.lang.StringBuilder("TProgressUpdateResp("); boolean first = true; sb.append("headerNames:"); if (this.headerNames == null) { sb.append("null"); } else { sb.append(this.headerNames); } first = false; if (!first) sb.append(", "); sb.append("rows:"); if (this.rows == null) { sb.append("null"); } else { sb.append(this.rows); } first = false; if (!first) sb.append(", "); sb.append("progressedPercentage:"); sb.append(this.progressedPercentage); first = false; if (!first) sb.append(", "); sb.append("status:"); if (this.status == null) { sb.append("null"); } else { sb.append(this.status); } first = false; if (!first) sb.append(", "); sb.append("footerSummary:"); if (this.footerSummary == null) { sb.append("null"); } else { sb.append(this.footerSummary); } first = false; if (!first) sb.append(", "); sb.append("startTime:"); sb.append(this.startTime); first = false; sb.append(")"); return sb.toString(); } public void validate() throws org.apache.thrift.TException { // check for required fields if (!isSetHeaderNames()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'headerNames' is unset! 
Struct:" + toString()); } if (!isSetRows()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'rows' is unset! Struct:" + toString()); } if (!isSetProgressedPercentage()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'progressedPercentage' is unset! Struct:" + toString()); } if (!isSetStatus()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'status' is unset! Struct:" + toString()); } if (!isSetFooterSummary()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'footerSummary' is unset! Struct:" + toString()); } if (!isSetStartTime()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'startTime' is unset! Struct:" + toString()); } // check for sub-struct validity } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException { try { // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor. 
__isset_bitfield = 0; read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class TProgressUpdateRespStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory { public TProgressUpdateRespStandardScheme getScheme() { return new TProgressUpdateRespStandardScheme(); } } private static class TProgressUpdateRespStandardScheme extends org.apache.thrift.scheme.StandardScheme<TProgressUpdateResp> { public void read(org.apache.thrift.protocol.TProtocol iprot, TProgressUpdateResp struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 1: // HEADER_NAMES if (schemeField.type == org.apache.thrift.protocol.TType.LIST) { { org.apache.thrift.protocol.TList _list200 = iprot.readListBegin(); struct.headerNames = new java.util.ArrayList<java.lang.String>(_list200.size); @org.apache.thrift.annotation.Nullable java.lang.String _elem201; for (int _i202 = 0; _i202 < _list200.size; ++_i202) { _elem201 = iprot.readString(); struct.headerNames.add(_elem201); } iprot.readListEnd(); } struct.setHeaderNamesIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 2: // ROWS if (schemeField.type == org.apache.thrift.protocol.TType.LIST) { { org.apache.thrift.protocol.TList _list203 = iprot.readListBegin(); struct.rows = new java.util.ArrayList<java.util.List<java.lang.String>>(_list203.size); @org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> _elem204; for (int _i205 = 0; _i205 < _list203.size; ++_i205) { { org.apache.thrift.protocol.TList _list206 = iprot.readListBegin(); _elem204 = new java.util.ArrayList<java.lang.String>(_list206.size); 
@org.apache.thrift.annotation.Nullable java.lang.String _elem207; for (int _i208 = 0; _i208 < _list206.size; ++_i208) { _elem207 = iprot.readString(); _elem204.add(_elem207); } iprot.readListEnd(); } struct.rows.add(_elem204); } iprot.readListEnd(); } struct.setRowsIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 3: // PROGRESSED_PERCENTAGE if (schemeField.type == org.apache.thrift.protocol.TType.DOUBLE) { struct.progressedPercentage = iprot.readDouble(); struct.setProgressedPercentageIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 4: // STATUS if (schemeField.type == org.apache.thrift.protocol.TType.I32) { struct.status = org.apache.hive.service.rpc.thrift.TJobExecutionStatus.findByValue(iprot.readI32()); struct.setStatusIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 5: // FOOTER_SUMMARY if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.footerSummary = iprot.readString(); struct.setFooterSummaryIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 6: // START_TIME if (schemeField.type == org.apache.thrift.protocol.TType.I64) { struct.startTime = iprot.readI64(); struct.setStartTimeIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); struct.validate(); } public void write(org.apache.thrift.protocol.TProtocol oprot, TProgressUpdateResp struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); if (struct.headerNames != null) { oprot.writeFieldBegin(HEADER_NAMES_FIELD_DESC); { oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, 
struct.headerNames.size())); for (java.lang.String _iter209 : struct.headerNames) { oprot.writeString(_iter209); } oprot.writeListEnd(); } oprot.writeFieldEnd(); } if (struct.rows != null) { oprot.writeFieldBegin(ROWS_FIELD_DESC); { oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.LIST, struct.rows.size())); for (java.util.List<java.lang.String> _iter210 : struct.rows) { { oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, _iter210.size())); for (java.lang.String _iter211 : _iter210) { oprot.writeString(_iter211); } oprot.writeListEnd(); } } oprot.writeListEnd(); } oprot.writeFieldEnd(); } oprot.writeFieldBegin(PROGRESSED_PERCENTAGE_FIELD_DESC); oprot.writeDouble(struct.progressedPercentage); oprot.writeFieldEnd(); if (struct.status != null) { oprot.writeFieldBegin(STATUS_FIELD_DESC); oprot.writeI32(struct.status.getValue()); oprot.writeFieldEnd(); } if (struct.footerSummary != null) { oprot.writeFieldBegin(FOOTER_SUMMARY_FIELD_DESC); oprot.writeString(struct.footerSummary); oprot.writeFieldEnd(); } oprot.writeFieldBegin(START_TIME_FIELD_DESC); oprot.writeI64(struct.startTime); oprot.writeFieldEnd(); oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class TProgressUpdateRespTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory { public TProgressUpdateRespTupleScheme getScheme() { return new TProgressUpdateRespTupleScheme(); } } private static class TProgressUpdateRespTupleScheme extends org.apache.thrift.scheme.TupleScheme<TProgressUpdateResp> { @Override public void write(org.apache.thrift.protocol.TProtocol prot, TProgressUpdateResp struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot; { oprot.writeI32(struct.headerNames.size()); for (java.lang.String _iter212 : struct.headerNames) { oprot.writeString(_iter212); } } { oprot.writeI32(struct.rows.size()); 
for (java.util.List<java.lang.String> _iter213 : struct.rows) { { oprot.writeI32(_iter213.size()); for (java.lang.String _iter214 : _iter213) { oprot.writeString(_iter214); } } } } oprot.writeDouble(struct.progressedPercentage); oprot.writeI32(struct.status.getValue()); oprot.writeString(struct.footerSummary); oprot.writeI64(struct.startTime); } @Override public void read(org.apache.thrift.protocol.TProtocol prot, TProgressUpdateResp struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot; { org.apache.thrift.protocol.TList _list215 = iprot.readListBegin(org.apache.thrift.protocol.TType.STRING); struct.headerNames = new java.util.ArrayList<java.lang.String>(_list215.size); @org.apache.thrift.annotation.Nullable java.lang.String _elem216; for (int _i217 = 0; _i217 < _list215.size; ++_i217) { _elem216 = iprot.readString(); struct.headerNames.add(_elem216); } } struct.setHeaderNamesIsSet(true); { org.apache.thrift.protocol.TList _list218 = iprot.readListBegin(org.apache.thrift.protocol.TType.LIST); struct.rows = new java.util.ArrayList<java.util.List<java.lang.String>>(_list218.size); @org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> _elem219; for (int _i220 = 0; _i220 < _list218.size; ++_i220) { { org.apache.thrift.protocol.TList _list221 = iprot.readListBegin(org.apache.thrift.protocol.TType.STRING); _elem219 = new java.util.ArrayList<java.lang.String>(_list221.size); @org.apache.thrift.annotation.Nullable java.lang.String _elem222; for (int _i223 = 0; _i223 < _list221.size; ++_i223) { _elem222 = iprot.readString(); _elem219.add(_elem222); } } struct.rows.add(_elem219); } } struct.setRowsIsSet(true); struct.progressedPercentage = iprot.readDouble(); struct.setProgressedPercentageIsSet(true); struct.status = org.apache.hive.service.rpc.thrift.TJobExecutionStatus.findByValue(iprot.readI32()); struct.setStatusIsSet(true); struct.footerSummary = 
iprot.readString(); struct.setFooterSummaryIsSet(true); struct.startTime = iprot.readI64(); struct.setStartTimeIsSet(true); } } private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) { return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme(); } }
apache/incubator-retired-wave
36,871
wave/src/test/java/org/waveprotocol/wave/model/conversation/ConversationTestBase.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.waveprotocol.wave.model.conversation; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyLong; import static org.mockito.Mockito.atMost; import static org.mockito.Mockito.inOrder; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.waveprotocol.wave.model.testing.ExtraAsserts.assertStructureEquivalent; import junit.framework.TestCase; import org.mockito.InOrder; import org.waveprotocol.wave.model.conversation.Conversation.Anchor; import org.waveprotocol.wave.model.conversation.ConversationBlip.LocatedReplyThread; import org.waveprotocol.wave.model.document.MutableDocument; import org.waveprotocol.wave.model.document.MutableDocument.Action; import org.waveprotocol.wave.model.document.util.DocHelper; import org.waveprotocol.wave.model.document.util.DocIterate; import org.waveprotocol.wave.model.document.util.LineContainers; import org.waveprotocol.wave.model.document.util.Point; import org.waveprotocol.wave.model.document.util.XmlStringBuilder; import org.waveprotocol.wave.model.operation.wave.WorthyChangeChecker; import 
org.waveprotocol.wave.model.util.CollectionUtils; import org.waveprotocol.wave.model.wave.ParticipantId; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; /** * Tests for abstract conversation, thread and blip interfaces. * * @author anorth@google.com (Alex North) */ public abstract class ConversationTestBase extends TestCase { private ObservableConversation target; private ObservableConversation alternate; protected ObservableConversation.Listener convListener; @Override protected void setUp() throws Exception { super.setUp(); target = makeConversation(); alternate = makeConversation(); convListener = mock(ObservableConversation.Listener.class); } /** * Creates a new, empty conversation object. All created conversations must be * from the same conversation view. */ protected abstract ObservableConversation makeConversation(); /** * Creates a new conversation object backed by the same data as another. * Changes made to one conversation should trigger events in the other. */ protected abstract ObservableConversation mirrorConversation(ObservableConversation toMirror); /** Checks that a blip is still valid. */ protected abstract void assertBlipValid(ConversationBlip blip); /** Checks that a blip is invalid. */ protected abstract void assertBlipInvalid(ConversationBlip blip); /** Checks that a thread is invalid. */ protected abstract void assertThreadInvalid(ConversationThread thread); /** Checks that a thread is valid. 
*/ protected abstract void assertThreadValid(ConversationThread thread); // // Anchoring // public void testEmptyConversationIsNotAnchored() { assertFalse(target.hasAnchor()); } public void testCreateAnchor() { populate(alternate); ConversationBlip blip = getFirstBlip(alternate); Anchor anchor = alternate.createAnchor(blip); assertTrue(alternate == anchor.getConversation()); assertTrue(blip == anchor.getBlip()); } public void testSetAnchor() { populate(alternate); ConversationBlip blip = getFirstBlip(alternate); Anchor anchor = alternate.createAnchor(blip); target.setAnchor(anchor); assertTrue(target.hasAnchor()); assertEquals(anchor, target.getAnchor()); } public void testAnchorToSelfFails() { populate(target); ConversationBlip blip = getFirstBlip(target); Anchor anchor = target.createAnchor(blip); try { target.setAnchor(anchor); fail("Expected an IllegalArgumentException"); } catch (IllegalArgumentException expected) { } assertFalse(target.hasAnchor()); } public void testClearAnchor() { populate(alternate); ConversationBlip blip = getFirstBlip(alternate); Anchor anchor = alternate.createAnchor(blip); target.setAnchor(anchor); target.setAnchor(null); assertFalse(target.hasAnchor()); } // Regression test for a bug where the manifest was forgotten after // any element removed. public void testAnchorStillAccessibleAfterBlipAdded() { populate(alternate); ConversationBlip blip = getFirstBlip(alternate); Anchor anchor = alternate.createAnchor(blip); target.setAnchor(anchor); target.getRootThread().appendBlip(); target.getRootThread().getFirstBlip().delete(); assertTrue(target.hasAnchor()); assertEquals(anchor, target.getAnchor()); } // // Participants. 
// public void testAddedParticipantIsRetreived() { ParticipantId creator = target.getParticipantIds().iterator().next(); ParticipantId fake = new ParticipantId("bill@foo.com"); target.addParticipant(fake); assertEquals(Arrays.asList(creator, fake), CollectionUtils.newArrayList(target.getParticipantIds())); } public void testRemovedParticipantNoLongerRetrieved() { ParticipantId creator = target.getParticipantIds().iterator().next(); ParticipantId fake = new ParticipantId("bill@foo.com"); target.addParticipant(fake); target.removeParticipant(fake); assertEquals(Collections.singletonList(creator), CollectionUtils.newArrayList(target.getParticipantIds())); } public void testParticipantsAreASet() { ParticipantId creator = target.getParticipantIds().iterator().next(); ParticipantId fake1 = new ParticipantId("joe"); ParticipantId fake2 = new ParticipantId("bill"); List<ParticipantId> participants = CollectionUtils.newArrayList(creator, fake1, fake2); target.addParticipant(fake1); target.addParticipant(fake2); assertEquals(participants, CollectionUtils.newArrayList(target.getParticipantIds())); target.addParticipant(fake2); assertEquals(participants, CollectionUtils.newArrayList(target.getParticipantIds())); } // // Threads and blips // public void testEmptyRootThreadHasNoBlips() { assertNotNull(target.getRootThread()); assertSame(target, target.getRootThread().getConversation()); assertNull(target.getRootThread().getFirstBlip()); assertNull(target.getRootThread().getParentBlip()); } public void testAppendBlipAppendsBlipsToThread() { ConversationThread thread = target.getRootThread(); ConversationBlip b1 = thread.appendBlip(); ConversationBlip b2 = thread.appendBlip(); ConversationBlip b3 = thread.appendBlip(); assertSame(b1, thread.getFirstBlip()); assertSame(target, b1.getConversation()); assertSame(thread, b1.getThread()); assertSame(b1, target.getBlip(b1.getId())); assertEquals(Arrays.asList(b1, b2, b3), getBlipList(thread)); } public void 
testInsetBlipBeforeFirstBlipCreatesNewFirstBlip() { ConversationThread thread = target.getRootThread(); ConversationBlip oldFirst = thread.appendBlip(); ConversationBlip newFirst = thread.insertBlip(oldFirst); assertSame(thread.getFirstBlip(), newFirst); assertSame(newFirst, target.getBlip(newFirst.getId())); assertSame(oldFirst, target.getBlip(oldFirst.getId())); assertEquals(Arrays.asList(newFirst, oldFirst), getBlipList(thread)); } public void testInsertBlipBetweenBlipsInserts() { ConversationThread thread = target.getRootThread(); ConversationBlip first = thread.appendBlip(); ConversationBlip last = thread.appendBlip(); ConversationBlip middle = thread.insertBlip(last); assertEquals(Arrays.asList(first, middle, last), getBlipList(thread)); } public void testAppendRepliesAppendsRepliesToBlip() { ConversationBlip blip = target.getRootThread().appendBlip(); ConversationThread t1 = blip.addReplyThread(); // Append blips to get a new ID for the next thread. t1.appendBlip(); ConversationThread t2 = blip.addReplyThread(); t2.appendBlip(); ConversationThread t3 = blip.addReplyThread(); t3.appendBlip(); assertSame(blip, t1.getParentBlip()); assertEquals(Arrays.asList(t1, t2, t3), CollectionUtils.newArrayList(blip.getReplyThreads())); assertThreadChildrenConsistent(blip); } public void testAppendInlineReplyCreatesInlineThread() { ConversationBlip blip = target.getRootThread().appendBlip(); MutableDocument<?, ?, ?> doc = blip.getContent(); int location = locateAfterLineElement(doc); ConversationThread thread = blip.addReplyThread(location); assertSame(blip, thread.getParentBlip()); assertEquals(Collections.singletonList(LocatedReplyThread.of(thread, location)), blip.locateReplyThreads()); assertThreadChildrenConsistent(blip); } public void testInlineReplyWithMultipleAnchorsUsesFirst() { ConversationBlip blip = target.getRootThread().appendBlip(); MutableDocument<?, ?, ?> doc = blip.getContent(); final int location = locateAfterLineElement(doc); ConversationThread thread = 
blip.addReplyThread(location); // Duplicate the anchor. doc.with(new Action() { public <N, E extends N, T extends N> void exec(MutableDocument<N, E, T> doc) { E anchor = Point.elementAfter(doc, doc.locate(location)); E anchorParent = doc.getParentElement(anchor); doc.createChildElement(anchorParent, doc.getTagName(anchor), doc.getAttributes(anchor)); } }); assertEquals(Collections.singletonList(LocatedReplyThread.of(thread, location)), blip.locateReplyThreads()); } public void testInlineReplyPointUpdatesWithDocContent() { final ConversationBlip blip = target.getRootThread().appendBlip(); MutableDocument<?, ?, ?> doc = blip.getContent(); doc.with(new Action() { public <N, E extends N, T extends N> void exec(MutableDocument<N, E, T> doc) { Point<N> startText = doc.locate(locateAfterLineElement(doc)); doc.insertText(startText, "cd"); // Insert reply between c|d. N bodyNode = DocHelper.getElementWithTagName(doc, Blips.BODY_TAGNAME); N textNode = doc.getFirstChild(bodyNode); textNode = doc.getNextSibling(textNode); int replyLocation = doc.getLocation(Point.inText(textNode, 1)); blip.addReplyThread(replyLocation); // Insert text to give abc|d startText = Point.before(doc, textNode); doc.insertText(startText, "ab"); int newLocation = blip.locateReplyThreads().iterator().next().getLocation(); assertEquals(replyLocation + 2, newLocation); } }); } public void testInlineReplyWithDeletedAnchorHasInvalidLocation() { final ConversationBlip blip = target.getRootThread().appendBlip(); MutableDocument<?, ?, ?> doc = blip.getContent(); doc.with(new Action() { public <N, E extends N, T extends N> void exec(MutableDocument<N, E, T> doc) { Point<N> startText = doc.locate(locateAfterLineElement(doc)); doc.insertText(startText, "cd"); // Insert reply between c|d. 
N bodyNode = DocHelper.getElementWithTagName(doc, Blips.BODY_TAGNAME); N textNode = doc.getFirstChild(bodyNode); textNode = doc.getNextSibling(textNode); int replyLocation = doc.getLocation(Point.inText(textNode, 1)); ConversationThread replyThread = blip.addReplyThread(replyLocation); // Delete text and anchor. doc.deleteRange(Point.before(doc, textNode), Point.inElement(bodyNode, null)); int newLocation = blip.locateReplyThreads().iterator().next().getLocation(); assertEquals(Blips.INVALID_INLINE_LOCATION, newLocation); } }); } public void testInlineRepliesInLocationOrder() { final ConversationBlip blip = target.getRootThread().appendBlip(); MutableDocument<?, ?, ?> doc = blip.getContent(); doc.with(new Action() { public <N, E extends N, T extends N> void exec(MutableDocument<N, E, T> doc) { Point<N> startText = doc.locate(locateAfterLineElement(doc)); int replyLocation = doc.getLocation(startText); ConversationThread t1 = blip.addReplyThread(replyLocation); t1.appendBlip(); // In front of t1. ConversationThread t2 = blip.addReplyThread(replyLocation); t2.appendBlip(); // In front of the others. ConversationThread t3 = blip.addReplyThread(replyLocation); t3.appendBlip(); // Delete t3's anchor. E anchorToDelete = Point.elementAfter(doc, doc.locate(replyLocation)); doc.deleteNode(anchorToDelete); List<LocatedReplyThread<ConversationThread>> expected = new ArrayList<LocatedReplyThread<ConversationThread>>(); expected.add(LocatedReplyThread.of(t2, replyLocation)); expected.add(LocatedReplyThread.of(t1, replyLocation + 2)); expected.add(LocatedReplyThread.of(t3, Blips.INVALID_INLINE_LOCATION)); List<LocatedReplyThread<? 
extends ConversationThread>> threads = CollectionUtils.newArrayList(blip.locateReplyThreads()); assertEquals(expected, threads); } }); } public void testDeleteSingleRootThreadBlipRemovesIt() { ConversationBlip blip = target.getRootThread().appendBlip(); blip.delete(); assertNull(target.getRootThread().getFirstBlip()); assertBlipInvalid(blip); } public void testDeleteSingleNonRootThreadBlipRemovesIt() { ConversationThread thread = target.getRootThread().appendBlip().addReplyThread(); ConversationBlip unDeleted = thread.appendBlip(); ConversationBlip blip = thread.appendBlip(); blip.delete(); assertEquals(Arrays.asList(unDeleted), getBlipList(thread)); assertBlipInvalid(blip); } public void testCanAppendAfterDeletingOnlyRootThreadBlip() { ConversationBlip first = target.getRootThread().appendBlip(); first.delete(); ConversationBlip second = target.getRootThread().appendBlip(); assertBlipInvalid(first); assertBlipValid(second); } public void testCanAppendAfterDeletingRootThreadReplies() { ConversationBlip first = target.getRootThread().appendBlip(); ConversationBlip second = target.getRootThread().appendBlip(); ConversationThread reply = first.addReplyThread(); reply.appendBlip(); second.delete(); first.delete(); ConversationBlip newFirst = target.getRootThread().appendBlip(); assertBlipValid(newFirst); } public void testDeleteBlipInThreadLeavesSiblings() { ConversationBlip b1 = target.getRootThread().appendBlip(); ConversationBlip b2 = target.getRootThread().appendBlip(); ConversationBlip b3 = target.getRootThread().appendBlip(); b2.delete(); assertEquals(Arrays.asList(b1, b3), getBlipList(target.getRootThread())); b1.delete(); assertEquals(Arrays.asList(b3), getBlipList(target.getRootThread())); } public void testDeleteBlipWithInlineReplyDeletesReply() { ConversationBlip blip = target.getRootThread().appendBlip(); MutableDocument<?, ?, ?> doc = blip.getContent(); ConversationThread reply = blip.addReplyThread(locateAfterLineElement(doc)); ConversationBlip replyBlip 
= reply.appendBlip(); blip.delete(); assertNull(target.getRootThread().getFirstBlip()); assertThreadInvalid(reply); assertBlipInvalid(replyBlip); } public void testDeleteBlipWithManyRepliesDeletesReplies() { ConversationBlip blip = target.getRootThread().appendBlip(); MutableDocument<?, ?, ?> doc = blip.getContent(); ConversationThread reply1 = blip.addReplyThread(); // Append blips to get a new ID for the next thread. reply1.appendBlip(); ConversationThread inlineReply1 = blip.addReplyThread(locateAfterLineElement(doc)); inlineReply1.appendBlip(); ConversationThread reply2 = blip.addReplyThread(); reply2.appendBlip(); ConversationThread inlineReply2 = blip.addReplyThread(locateAfterLineElement(doc)); inlineReply2.appendBlip(); blip.delete(); assertNull(target.getRootThread().getFirstBlip()); assertBlipInvalid(blip); assertThreadInvalid(reply1); assertThreadInvalid(reply2); assertThreadInvalid(inlineReply1); assertThreadInvalid(inlineReply2); } public void testDeletedConversationIsUnusable() { target.delete(); assertConversationAccessible(target); assertConversationUnusable(target); } public void testDeleteConversationInvalidatesBlips() { target.getRootThread(); ObservableConversationBlip blip1 = target.getRootThread().appendBlip(); ObservableConversationBlip blip2 = target.getRootThread().appendBlip(); target.addListener(convListener); target.delete(); assertConversationUnusable(target); assertBlipInvalid(blip1); assertBlipInvalid(blip2); verify(convListener).onBlipDeleted(blip1); verify(convListener).onBlipDeleted(blip2); } public void testDeleteConversationInvalidatesNonRootThreads() { ObservableConversationBlip outerBlip = target.getRootThread().appendBlip(); ObservableConversationThread inlineThread = outerBlip.addReplyThread(locateAfterLineElement(outerBlip.getContent())); ObservableConversationBlip innerBlip = inlineThread.appendBlip(); target.addListener(convListener); target.delete(); assertBlipInvalid(outerBlip); assertBlipInvalid(innerBlip); 
assertThreadInvalid(inlineThread); } public void testDeleteConversationEvents() { ObservableConversationBlip outerBlip = target.getRootThread().appendBlip(); ObservableConversationThread inlineThread = outerBlip.addReplyThread(locateAfterLineElement(outerBlip.getContent())); ObservableConversationBlip innerBlip = inlineThread.appendBlip(); target.addListener(convListener); target.delete(); assertBlipInvalid(outerBlip); assertBlipInvalid(innerBlip); assertThreadInvalid(inlineThread); verify(convListener).onBlipDeleted(innerBlip); verify(convListener).onThreadDeleted(inlineThread); verify(convListener).onBlipDeleted(outerBlip); verifyNoMoreInteractions(convListener); } /** * Tests that non-inline replies to an inline reply are deleted * completely when the inline reply's parent blip is deleted. No * tombstones remain. */ public void testDeleteBlipDeletesRepliesToInlineReply() { ConversationBlip blip = target.getRootThread().appendBlip(); ConversationThread inlineReply = blip.addReplyThread(locateAfterLineElement( blip.getContent())); ConversationBlip inlineReplyBlip = inlineReply.appendBlip(); ConversationThread nonInlineReplyToReply = inlineReplyBlip.addReplyThread(); ConversationBlip nonInlineReplyBlip = nonInlineReplyToReply.appendBlip(); blip.delete(); assertNull(target.getRootThread().getFirstBlip()); assertBlipInvalid(nonInlineReplyBlip); assertThreadInvalid(nonInlineReplyToReply); assertBlipInvalid(inlineReplyBlip); assertThreadInvalid(inlineReply); } public void testDeleteLastBlipInNonRootThreadDeletesThread() { ConversationBlip blip = target.getRootThread().appendBlip(); ConversationThread replyThread = blip.addReplyThread(); ConversationBlip replyBlip = replyThread.appendBlip(); replyBlip.delete(); assertFalse(blip.getReplyThreads().iterator().hasNext()); assertThreadChildrenConsistent(blip); assertThreadInvalid(replyThread); } // Bug 2220263. 
public void testCanReplyAfterDeletingReplyThread() { ConversationThread topThread = target.getRootThread().appendBlip().addReplyThread(); ConversationBlip topBlip = topThread.appendBlip(); // Add two reply threads. Delete the second (by deleting its blip). ConversationThread firstReply = topBlip.addReplyThread(); firstReply.appendBlip(); ConversationThread secondReply = topBlip.addReplyThread(); secondReply.appendBlip().delete(); // Reply again. This used to throw IndexOutOfBounds. ConversationThread replacementReply = topBlip.addReplyThread(); ConversationBlip replacementBlip = replacementReply.appendBlip(); assertBlipValid(replacementBlip); assertEquals(Arrays.asList(firstReply, replacementReply), CollectionUtils.newArrayList(topBlip.getReplyThreads())); } public void testDeleteInlineReplyDeletesAnchor() { ConversationBlip blip = target.getRootThread().appendBlip(); XmlStringBuilder xmlBefore = XmlStringBuilder.innerXml(blip.getContent()); ConversationThread inlineReply = blip.addReplyThread(locateAfterLineElement( blip.getContent())); ConversationBlip inlineReplyBlip = inlineReply.appendBlip(); inlineReplyBlip.delete(); assertBlipInvalid(inlineReplyBlip); assertThreadInvalid(inlineReply); assertStructureEquivalent(xmlBefore, blip.getContent()); } public void testDeleteRootThreadRemovesAllBlips() { ConversationThread rootThread = target.getRootThread(); ConversationBlip first = rootThread.appendBlip(); ConversationBlip second = rootThread.appendBlip(); rootThread.delete(); assertBlipInvalid(first); assertBlipInvalid(second); assertEquals(CollectionUtils.newArrayList(), getBlipList(rootThread)); assertThreadValid(rootThread); } public void testDeleteNonRootThreadRemovesAllBlipsAndThread() { ConversationBlip blip = target.getRootThread().appendBlip(); ConversationThread replyThread = blip.addReplyThread(); ConversationBlip replyBlip1 = replyThread.appendBlip(); ConversationBlip replyBlip2 = replyThread.appendBlip(); replyThread.delete(); 
assertFalse(blip.getReplyThreads().iterator().hasNext()); assertThreadChildrenConsistent(blip); assertBlipInvalid(replyBlip1); assertBlipInvalid(replyBlip2); assertThreadInvalid(replyThread); } public void testDeleteEmptyThread() { ConversationBlip blip = target.getRootThread().appendBlip(); ConversationThread replyThread = blip.addReplyThread(); replyThread.delete(); assertFalse(blip.getReplyThreads().iterator().hasNext()); assertThreadChildrenConsistent(blip); assertThreadInvalid(replyThread); } /** * Tests that methods which access the state of a blip without changing it * are correct after blip deletion. */ public void testBlipCanBeAccessedAfterDeletion() { ConversationBlip blip = target.getRootThread().appendBlip(); blip.delete(); assertBlipInvalid(blip); assertBlipAccessible(blip); assertEquals(target.getRootThread(), blip.getThread()); assertEquals(Collections.emptyList(), getBlipList(target.getRootThread())); assertEquals(Collections.emptyList(), getAllReplyList(blip)); } /** * Tests that methods which access the state of a blip (this time with a * child thread) without changing it are correct after blip deletion. */ public void testBlipWithThreadCanBeAccessedAfterDeletion() { ConversationBlip blip = target.getRootThread().appendBlip(); ConversationThread thread = blip.addReplyThread(); blip.delete(); assertBlipInvalid(blip); assertBlipAccessible(blip); assertEquals(target.getRootThread(), blip.getThread()); assertEquals(Collections.emptyList(), getBlipList(target.getRootThread())); assertEquals(blip, thread.getParentBlip()); assertEquals(Collections.emptyList(), getAllReplyList(blip)); } /** * Tests that methods which access the state of a thread without changing it * are correct after thread deletion. 
*/ public void testThreadCanBeAccessedAfterDeletion() { ConversationBlip blip = target.getRootThread().appendBlip(); ConversationThread thread = blip.addReplyThread(); ConversationBlip replyBlip = thread.appendBlip(); thread.delete(); assertBlipInvalid(replyBlip); assertBlipAccessible(replyBlip); assertThreadInvalid(thread); assertThreadAccessible(thread); assertEquals(blip, thread.getParentBlip()); assertFalse(blip.getReplyThreads().iterator().hasNext()); assertEquals(thread, replyBlip.getThread()); assertEquals(Collections.emptyList(), getBlipList(thread)); } // // Tests for ObservableConversation. // public void testSetAnchorEventsAreFired() { populate(alternate); ObservableConversation.AnchorListener listener = mock(ObservableConversation.AnchorListener.class); target.addListener(listener); Anchor anchor1 = alternate.createAnchor(getFirstBlip(alternate)); // Set anchor from null. target.setAnchor(anchor1); verify(listener).onAnchorChanged(null, anchor1); // Change anchor to different blip. Anchor anchor11 = alternate.createAnchor(alternate.getRootThread().getFirstBlip() .getReplyThreads().iterator().next().getFirstBlip()); target.setAnchor(anchor11); verify(listener).onAnchorChanged(anchor1, anchor11); // Change anchor to different wavelet. ObservableConversation alternate2 = makeConversation(); populate(alternate2); Anchor anchor2 = alternate2.createAnchor(getFirstBlip(alternate2)); target.setAnchor(anchor2); verify(listener).onAnchorChanged(anchor11, anchor2); // Set anchor to null. target.setAnchor(null); verify(listener).onAnchorChanged(anchor2, null); // Remove listener. target.removeListener(listener); target.setAnchor(anchor1); verifyNoMoreInteractions(listener); } // These methods test that local modifications cause events via the // blip and thread listeners. They test that modifications to the underlying // data cause events via the conversation listener on a mirror conversation. 
public void testParticipantChangesFireEvents() { ParticipantId p1 = new ParticipantId("someone@example.com"); ParticipantId p2 = new ParticipantId("else@example.com"); ObservableConversation mirror = mirrorConversation(target); mirror.addListener(convListener); target.addParticipant(p1); target.addParticipant(p2); verify(convListener).onParticipantAdded(p1); verify(convListener).onParticipantAdded(p2); target.addParticipant(p1); verifyNoMoreInteractions(convListener); target.removeParticipant(p2); verify(convListener).onParticipantRemoved(p2); } public void testThreadAppendInsertBlipFiresEvent() { ObservableConversation mirror = mirrorConversation(target); mirror.addListener(convListener); ObservableConversationBlip b1 = target.getRootThread().appendBlip(); ObservableConversationBlip b1mirror = mirror.getRootThread().getFirstBlip(); verify(convListener).onBlipAdded(b1mirror); target.getRootThread().insertBlip(b1); ObservableConversationBlip b2mirror = mirror.getRootThread().getFirstBlip(); verify(convListener).onBlipAdded(b2mirror); allowBlipTimestampChanged(convListener); verifyNoMoreInteractions(convListener); } public void testThreadRemovalFiresEvent() { ObservableConversation mirror = mirrorConversation(target); ObservableConversationBlip b1 = target.getRootThread().appendBlip(); ObservableConversationThread t1 = b1.addReplyThread(); ObservableConversationThread t1mirror = mirror.getRootThread().getFirstBlip() .getReplyThreads().iterator().next(); t1.appendBlip(); ObservableConversationBlip b3mirror = t1mirror.getFirstBlip(); mirror.addListener(convListener); // Trigger thread deletion. 
t1.delete(); verify(convListener).onBlipDeleted(b3mirror); verify(convListener).onThreadDeleted(t1mirror); allowBlipTimestampChanged(convListener); verifyNoMoreInteractions(convListener); } public void testRootThreadRemovalDoesntFireEvent() { ObservableConversation mirror = mirrorConversation(target); target.getRootThread().appendBlip(); ObservableConversationBlip b1mirror = mirror.getRootThread().getFirstBlip(); mirror.addListener(convListener); // Trigger thread deletion. target.getRootThread().delete(); verify(convListener).onBlipDeleted(b1mirror); allowBlipTimestampChanged(convListener); verifyNoMoreInteractions(convListener); } public void testBlipAppendReplyFiresEvent() { ObservableConversation mirror = mirrorConversation(target); ObservableConversationBlip b1 = target.getRootThread().appendBlip(); ObservableConversationBlip b1mirror = mirror.getRootThread().getFirstBlip(); mirror.addListener(convListener); b1.addReplyThread(); ObservableConversationThread t1mirror = b1mirror.getReplyThreads().iterator().next(); verify(convListener).onThreadAdded(t1mirror); verifyNoMoreInteractions(convListener); } public void testBlipRemovalFiresEvent() { ObservableConversation mirror = mirrorConversation(target); ObservableConversationBlip b1 = target.getRootThread().appendBlip(); ObservableConversationBlip b1mirror = mirror.getRootThread().getFirstBlip(); mirror.addListener(convListener); b1.delete(); verify(convListener).onBlipDeleted(b1mirror); allowBlipTimestampChanged(convListener); verifyNoMoreInteractions(convListener); } public void testCompoundEventsFireBottomUp() { ObservableConversation mirror = mirrorConversation(target); // Build tall structure. 
// rootThread // |- b1 (deleted) // |- t1 // |- b2 ObservableConversationBlip b1 = target.getRootThread().appendBlip(); ObservableConversationThread t1 = b1.addReplyThread(); ObservableConversationBlip b2 = t1.appendBlip(); ObservableConversationBlip b1mirror = mirror.getRootThread().getFirstBlip(); ObservableConversationThread t1mirror = b1mirror.getReplyThreads().iterator().next(); ObservableConversationBlip b2mirror = t1mirror.getFirstBlip(); mirror.addListener(convListener); // Trigger cascading deletion. b1.delete(); // Timestamp changed events may have also occurred on the blip listeners. // Mockito doesn't support atMost on inOrder verifications, hence we cannot // verify those events then verifyNoMoreInteractions on the blip listeners. // TODO(anorth): verifyNoMoreInteractions when the CWM injects a clock. InOrder order = inOrder(convListener); order.verify(convListener).onBlipDeleted(b2mirror); order.verify(convListener).onThreadDeleted(t1mirror); order.verify(convListener).onBlipDeleted(b1mirror); allowBlipTimestampChanged(convListener); verifyNoMoreInteractions(convListener); } public void testRemovedListenersReceiveNoEvents() { ObservableConversation mirror = mirrorConversation(target); ObservableConversationBlip b1 = target.getRootThread().appendBlip(); ObservableConversationThread t1 = b1.addReplyThread(); ObservableConversationBlip b2 = t1.appendBlip(); ObservableConversationBlip b1mirror = mirror.getRootThread().getFirstBlip(); ObservableConversationThread t1mirror = b1mirror.getReplyThreads().iterator().next(); t1mirror.getFirstBlip(); mirror.addListener(convListener); mirror.removeListener(convListener); b1.delete(); verifyNoMoreInteractions(convListener); } // // Data documents // public void testCanGetDataDocument() { MutableDocument<?, ?, ?> doc = target.getDataDocument("some-doc-id"); assertNotNull(doc); } public void testCannotGetBlipAsDataDocument() { ConversationBlip blip = target.getRootThread().appendBlip(); try { 
target.getDataDocument(blip.getId()); fail("Expected an exception fetching a blip document as a data doc"); } catch (IllegalArgumentException expected) { } } public void testCannotGetManifestAsDataDocument() { try { target.getDataDocument("conversation"); fail("Expected an exception fetching manifest as a data doc"); } catch (IllegalArgumentException expected) { } } public void testWorthynessConstant() { assertEquals(Blips.THREAD_INLINE_ANCHOR_TAGNAME, WorthyChangeChecker.THREAD_INLINE_ANCHOR_TAGNAME); } protected static ConversationBlip getFirstBlip(Conversation conv) { return conv.getRootThread().getFirstBlip(); } /** * Appends a blip to the root thread, and adds a reply to that blip with one * blip. */ protected static void populate(Conversation conv) { ConversationBlip blip = conv.getRootThread().appendBlip(); blip.addReplyThread().appendBlip(); } protected static <N> int locateAfterLineElement(MutableDocument<N, ?, ?> doc) { return locateAfterLineElementInner(doc); } private static <N, E extends N, T extends N> int locateAfterLineElementInner( MutableDocument<N, E, T> doc) { for (E el : DocIterate.deepElementsReverse(doc, doc.getDocumentElement(), null)) { if (LineContainers.isLineContainer(doc, el)) { Point<N> point = Point.inElement((N) el, null); return doc.getLocation(point); } } LineContainers.appendLine(doc, XmlStringBuilder.createEmpty()); return locateAfterLineElement(doc); } /** * Convenience function that returns the blips in a thread as a List. */ protected static List<ConversationBlip> getBlipList(ConversationThread thread) { return CollectionUtils.newArrayList(thread.getBlips()); } /** * Convenience function that returns all reply threads to a blip as a List. */ protected static List<ConversationThread> getAllReplyList(ConversationBlip blip) { return CollectionUtils.newArrayList(blip.getReplyThreads()); } /** * Verifies any number of method invocations on a mock. 
*/ protected static <T> T allow(T mock) { return verify(mock, atMost(Integer.MAX_VALUE)); } /** * Allows any invocations of onBlipTimestampChanged on a mock. */ protected static void allowBlipTimestampChanged(ObservableConversation.Listener mock) { allow(mock).onBlipTimestampChanged(any(ObservableConversationBlip.class), anyLong(), anyLong()); } /** * Checks that the set of all reply threads of a blip is the same as the union * of the inline reply and non-inline reply threads. */ private static void assertThreadChildrenConsistent(ConversationBlip blip) { Set<ConversationThread> allChildren = new HashSet<ConversationThread>(); for (ConversationThread thread : blip.getReplyThreads()) { assertFalse(allChildren.contains(thread)); allChildren.add(thread); } for (ConversationThread child : blip.getReplyThreads()) { assertTrue(allChildren.contains(child)); allChildren.remove(child); } // make sure they are exactly equals assertEquals(0, allChildren.size()); } /** * Checks that a conversation is unusable by attempting mutation. */ protected static void assertConversationUnusable(Conversation conversation) { try { conversation.setAnchor(null); fail("Expected conversation to be unusable"); } catch (IllegalStateException expected) { } try { conversation.getRootThread().appendBlip(); fail("Expected conversation items to be unusable"); } catch (IllegalStateException expected) { } } /** * Checks that a conversation is accessible by examining some state. */ protected static void assertConversationAccessible(Conversation conversation) { conversation.getAnchor(); assertThreadAccessible(conversation.getRootThread()); } /** * Asserts that the state-querying methods on a blip can be called. 
*/ protected static void assertBlipAccessible(ConversationBlip blip) { blip.getReplyThreads(); blip.getAuthorId(); blip.getContent(); blip.getContributorIds(); blip.getConversation(); blip.getId(); blip.locateReplyThreads(); blip.getLastModifiedTime(); blip.getLastModifiedVersion(); blip.getReplyThreads(); blip.getThread(); blip.hackGetRaw(); blip.isRoot(); } /** * Asserts that the state-querying methods on a thread can be called. */ protected static void assertThreadAccessible(ConversationThread thread) { thread.getBlips(); thread.getConversation(); thread.getFirstBlip(); thread.getId(); thread.getParentBlip(); } }
googleapis/google-cloud-java
36,710
java-document-ai/proto-google-cloud-document-ai-v1beta3/src/main/java/com/google/cloud/documentai/v1beta3/BatchDocumentsInputConfig.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/documentai/v1beta3/document_io.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.documentai.v1beta3; /** * * * <pre> * The common config to specify a set of documents used as input. * </pre> * * Protobuf type {@code google.cloud.documentai.v1beta3.BatchDocumentsInputConfig} */ public final class BatchDocumentsInputConfig extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.documentai.v1beta3.BatchDocumentsInputConfig) BatchDocumentsInputConfigOrBuilder { private static final long serialVersionUID = 0L; // Use BatchDocumentsInputConfig.newBuilder() to construct. 
private BatchDocumentsInputConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private BatchDocumentsInputConfig() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new BatchDocumentsInputConfig(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.documentai.v1beta3.DocumentIoProto .internal_static_google_cloud_documentai_v1beta3_BatchDocumentsInputConfig_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.documentai.v1beta3.DocumentIoProto .internal_static_google_cloud_documentai_v1beta3_BatchDocumentsInputConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig.class, com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig.Builder.class); } private int sourceCase_ = 0; @SuppressWarnings("serial") private java.lang.Object source_; public enum SourceCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { GCS_PREFIX(1), GCS_DOCUMENTS(2), SOURCE_NOT_SET(0); private final int value; private SourceCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. 
*/ @java.lang.Deprecated public static SourceCase valueOf(int value) { return forNumber(value); } public static SourceCase forNumber(int value) { switch (value) { case 1: return GCS_PREFIX; case 2: return GCS_DOCUMENTS; case 0: return SOURCE_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public SourceCase getSourceCase() { return SourceCase.forNumber(sourceCase_); } public static final int GCS_PREFIX_FIELD_NUMBER = 1; /** * * * <pre> * The set of documents that match the specified Cloud Storage `gcs_prefix`. * </pre> * * <code>.google.cloud.documentai.v1beta3.GcsPrefix gcs_prefix = 1;</code> * * @return Whether the gcsPrefix field is set. */ @java.lang.Override public boolean hasGcsPrefix() { return sourceCase_ == 1; } /** * * * <pre> * The set of documents that match the specified Cloud Storage `gcs_prefix`. * </pre> * * <code>.google.cloud.documentai.v1beta3.GcsPrefix gcs_prefix = 1;</code> * * @return The gcsPrefix. */ @java.lang.Override public com.google.cloud.documentai.v1beta3.GcsPrefix getGcsPrefix() { if (sourceCase_ == 1) { return (com.google.cloud.documentai.v1beta3.GcsPrefix) source_; } return com.google.cloud.documentai.v1beta3.GcsPrefix.getDefaultInstance(); } /** * * * <pre> * The set of documents that match the specified Cloud Storage `gcs_prefix`. * </pre> * * <code>.google.cloud.documentai.v1beta3.GcsPrefix gcs_prefix = 1;</code> */ @java.lang.Override public com.google.cloud.documentai.v1beta3.GcsPrefixOrBuilder getGcsPrefixOrBuilder() { if (sourceCase_ == 1) { return (com.google.cloud.documentai.v1beta3.GcsPrefix) source_; } return com.google.cloud.documentai.v1beta3.GcsPrefix.getDefaultInstance(); } public static final int GCS_DOCUMENTS_FIELD_NUMBER = 2; /** * * * <pre> * The set of documents individually specified on Cloud Storage. * </pre> * * <code>.google.cloud.documentai.v1beta3.GcsDocuments gcs_documents = 2;</code> * * @return Whether the gcsDocuments field is set. 
*/ @java.lang.Override public boolean hasGcsDocuments() { return sourceCase_ == 2; } /** * * * <pre> * The set of documents individually specified on Cloud Storage. * </pre> * * <code>.google.cloud.documentai.v1beta3.GcsDocuments gcs_documents = 2;</code> * * @return The gcsDocuments. */ @java.lang.Override public com.google.cloud.documentai.v1beta3.GcsDocuments getGcsDocuments() { if (sourceCase_ == 2) { return (com.google.cloud.documentai.v1beta3.GcsDocuments) source_; } return com.google.cloud.documentai.v1beta3.GcsDocuments.getDefaultInstance(); } /** * * * <pre> * The set of documents individually specified on Cloud Storage. * </pre> * * <code>.google.cloud.documentai.v1beta3.GcsDocuments gcs_documents = 2;</code> */ @java.lang.Override public com.google.cloud.documentai.v1beta3.GcsDocumentsOrBuilder getGcsDocumentsOrBuilder() { if (sourceCase_ == 2) { return (com.google.cloud.documentai.v1beta3.GcsDocuments) source_; } return com.google.cloud.documentai.v1beta3.GcsDocuments.getDefaultInstance(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (sourceCase_ == 1) { output.writeMessage(1, (com.google.cloud.documentai.v1beta3.GcsPrefix) source_); } if (sourceCase_ == 2) { output.writeMessage(2, (com.google.cloud.documentai.v1beta3.GcsDocuments) source_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (sourceCase_ == 1) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 1, (com.google.cloud.documentai.v1beta3.GcsPrefix) source_); } if (sourceCase_ == 2) { size += 
com.google.protobuf.CodedOutputStream.computeMessageSize( 2, (com.google.cloud.documentai.v1beta3.GcsDocuments) source_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig)) { return super.equals(obj); } com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig other = (com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig) obj; if (!getSourceCase().equals(other.getSourceCase())) return false; switch (sourceCase_) { case 1: if (!getGcsPrefix().equals(other.getGcsPrefix())) return false; break; case 2: if (!getGcsDocuments().equals(other.getGcsDocuments())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); switch (sourceCase_) { case 1: hash = (37 * hash) + GCS_PREFIX_FIELD_NUMBER; hash = (53 * hash) + getGcsPrefix().hashCode(); break; case 2: hash = (37 * hash) + GCS_DOCUMENTS_FIELD_NUMBER; hash = (53 * hash) + getGcsDocuments().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The common config to specify a set of documents used as input. 
* </pre> * * Protobuf type {@code google.cloud.documentai.v1beta3.BatchDocumentsInputConfig} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.documentai.v1beta3.BatchDocumentsInputConfig) com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfigOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.documentai.v1beta3.DocumentIoProto .internal_static_google_cloud_documentai_v1beta3_BatchDocumentsInputConfig_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.documentai.v1beta3.DocumentIoProto .internal_static_google_cloud_documentai_v1beta3_BatchDocumentsInputConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig.class, com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig.Builder.class); } // Construct using com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (gcsPrefixBuilder_ != null) { gcsPrefixBuilder_.clear(); } if (gcsDocumentsBuilder_ != null) { gcsDocumentsBuilder_.clear(); } sourceCase_ = 0; source_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.documentai.v1beta3.DocumentIoProto .internal_static_google_cloud_documentai_v1beta3_BatchDocumentsInputConfig_descriptor; } @java.lang.Override public com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig getDefaultInstanceForType() { return com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig.getDefaultInstance(); } 
@java.lang.Override public com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig build() { com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig buildPartial() { com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig result = new com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0( com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig result) { int from_bitField0_ = bitField0_; } private void buildPartialOneofs( com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig result) { result.sourceCase_ = sourceCase_; result.source_ = this.source_; if (sourceCase_ == 1 && gcsPrefixBuilder_ != null) { result.source_ = gcsPrefixBuilder_.build(); } if (sourceCase_ == 2 && gcsDocumentsBuilder_ != null) { result.source_ = gcsDocumentsBuilder_.build(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return 
super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig) { return mergeFrom((com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig other) { if (other == com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig.getDefaultInstance()) return this; switch (other.getSourceCase()) { case GCS_PREFIX: { mergeGcsPrefix(other.getGcsPrefix()); break; } case GCS_DOCUMENTS: { mergeGcsDocuments(other.getGcsDocuments()); break; } case SOURCE_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getGcsPrefixFieldBuilder().getBuilder(), extensionRegistry); sourceCase_ = 1; break; } // case 10 case 18: { input.readMessage(getGcsDocumentsFieldBuilder().getBuilder(), extensionRegistry); sourceCase_ = 2; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int sourceCase_ = 0; private java.lang.Object source_; public SourceCase getSourceCase() { return 
SourceCase.forNumber(sourceCase_); } public Builder clearSource() { sourceCase_ = 0; source_ = null; onChanged(); return this; } private int bitField0_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.documentai.v1beta3.GcsPrefix, com.google.cloud.documentai.v1beta3.GcsPrefix.Builder, com.google.cloud.documentai.v1beta3.GcsPrefixOrBuilder> gcsPrefixBuilder_; /** * * * <pre> * The set of documents that match the specified Cloud Storage `gcs_prefix`. * </pre> * * <code>.google.cloud.documentai.v1beta3.GcsPrefix gcs_prefix = 1;</code> * * @return Whether the gcsPrefix field is set. */ @java.lang.Override public boolean hasGcsPrefix() { return sourceCase_ == 1; } /** * * * <pre> * The set of documents that match the specified Cloud Storage `gcs_prefix`. * </pre> * * <code>.google.cloud.documentai.v1beta3.GcsPrefix gcs_prefix = 1;</code> * * @return The gcsPrefix. */ @java.lang.Override public com.google.cloud.documentai.v1beta3.GcsPrefix getGcsPrefix() { if (gcsPrefixBuilder_ == null) { if (sourceCase_ == 1) { return (com.google.cloud.documentai.v1beta3.GcsPrefix) source_; } return com.google.cloud.documentai.v1beta3.GcsPrefix.getDefaultInstance(); } else { if (sourceCase_ == 1) { return gcsPrefixBuilder_.getMessage(); } return com.google.cloud.documentai.v1beta3.GcsPrefix.getDefaultInstance(); } } /** * * * <pre> * The set of documents that match the specified Cloud Storage `gcs_prefix`. * </pre> * * <code>.google.cloud.documentai.v1beta3.GcsPrefix gcs_prefix = 1;</code> */ public Builder setGcsPrefix(com.google.cloud.documentai.v1beta3.GcsPrefix value) { if (gcsPrefixBuilder_ == null) { if (value == null) { throw new NullPointerException(); } source_ = value; onChanged(); } else { gcsPrefixBuilder_.setMessage(value); } sourceCase_ = 1; return this; } /** * * * <pre> * The set of documents that match the specified Cloud Storage `gcs_prefix`. 
* </pre> * * <code>.google.cloud.documentai.v1beta3.GcsPrefix gcs_prefix = 1;</code> */ public Builder setGcsPrefix( com.google.cloud.documentai.v1beta3.GcsPrefix.Builder builderForValue) { if (gcsPrefixBuilder_ == null) { source_ = builderForValue.build(); onChanged(); } else { gcsPrefixBuilder_.setMessage(builderForValue.build()); } sourceCase_ = 1; return this; } /** * * * <pre> * The set of documents that match the specified Cloud Storage `gcs_prefix`. * </pre> * * <code>.google.cloud.documentai.v1beta3.GcsPrefix gcs_prefix = 1;</code> */ public Builder mergeGcsPrefix(com.google.cloud.documentai.v1beta3.GcsPrefix value) { if (gcsPrefixBuilder_ == null) { if (sourceCase_ == 1 && source_ != com.google.cloud.documentai.v1beta3.GcsPrefix.getDefaultInstance()) { source_ = com.google.cloud.documentai.v1beta3.GcsPrefix.newBuilder( (com.google.cloud.documentai.v1beta3.GcsPrefix) source_) .mergeFrom(value) .buildPartial(); } else { source_ = value; } onChanged(); } else { if (sourceCase_ == 1) { gcsPrefixBuilder_.mergeFrom(value); } else { gcsPrefixBuilder_.setMessage(value); } } sourceCase_ = 1; return this; } /** * * * <pre> * The set of documents that match the specified Cloud Storage `gcs_prefix`. * </pre> * * <code>.google.cloud.documentai.v1beta3.GcsPrefix gcs_prefix = 1;</code> */ public Builder clearGcsPrefix() { if (gcsPrefixBuilder_ == null) { if (sourceCase_ == 1) { sourceCase_ = 0; source_ = null; onChanged(); } } else { if (sourceCase_ == 1) { sourceCase_ = 0; source_ = null; } gcsPrefixBuilder_.clear(); } return this; } /** * * * <pre> * The set of documents that match the specified Cloud Storage `gcs_prefix`. * </pre> * * <code>.google.cloud.documentai.v1beta3.GcsPrefix gcs_prefix = 1;</code> */ public com.google.cloud.documentai.v1beta3.GcsPrefix.Builder getGcsPrefixBuilder() { return getGcsPrefixFieldBuilder().getBuilder(); } /** * * * <pre> * The set of documents that match the specified Cloud Storage `gcs_prefix`. 
* </pre> * * <code>.google.cloud.documentai.v1beta3.GcsPrefix gcs_prefix = 1;</code> */ @java.lang.Override public com.google.cloud.documentai.v1beta3.GcsPrefixOrBuilder getGcsPrefixOrBuilder() { if ((sourceCase_ == 1) && (gcsPrefixBuilder_ != null)) { return gcsPrefixBuilder_.getMessageOrBuilder(); } else { if (sourceCase_ == 1) { return (com.google.cloud.documentai.v1beta3.GcsPrefix) source_; } return com.google.cloud.documentai.v1beta3.GcsPrefix.getDefaultInstance(); } } /** * * * <pre> * The set of documents that match the specified Cloud Storage `gcs_prefix`. * </pre> * * <code>.google.cloud.documentai.v1beta3.GcsPrefix gcs_prefix = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.documentai.v1beta3.GcsPrefix, com.google.cloud.documentai.v1beta3.GcsPrefix.Builder, com.google.cloud.documentai.v1beta3.GcsPrefixOrBuilder> getGcsPrefixFieldBuilder() { if (gcsPrefixBuilder_ == null) { if (!(sourceCase_ == 1)) { source_ = com.google.cloud.documentai.v1beta3.GcsPrefix.getDefaultInstance(); } gcsPrefixBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.documentai.v1beta3.GcsPrefix, com.google.cloud.documentai.v1beta3.GcsPrefix.Builder, com.google.cloud.documentai.v1beta3.GcsPrefixOrBuilder>( (com.google.cloud.documentai.v1beta3.GcsPrefix) source_, getParentForChildren(), isClean()); source_ = null; } sourceCase_ = 1; onChanged(); return gcsPrefixBuilder_; } private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.documentai.v1beta3.GcsDocuments, com.google.cloud.documentai.v1beta3.GcsDocuments.Builder, com.google.cloud.documentai.v1beta3.GcsDocumentsOrBuilder> gcsDocumentsBuilder_; /** * * * <pre> * The set of documents individually specified on Cloud Storage. * </pre> * * <code>.google.cloud.documentai.v1beta3.GcsDocuments gcs_documents = 2;</code> * * @return Whether the gcsDocuments field is set. 
*/ @java.lang.Override public boolean hasGcsDocuments() { return sourceCase_ == 2; } /** * * * <pre> * The set of documents individually specified on Cloud Storage. * </pre> * * <code>.google.cloud.documentai.v1beta3.GcsDocuments gcs_documents = 2;</code> * * @return The gcsDocuments. */ @java.lang.Override public com.google.cloud.documentai.v1beta3.GcsDocuments getGcsDocuments() { if (gcsDocumentsBuilder_ == null) { if (sourceCase_ == 2) { return (com.google.cloud.documentai.v1beta3.GcsDocuments) source_; } return com.google.cloud.documentai.v1beta3.GcsDocuments.getDefaultInstance(); } else { if (sourceCase_ == 2) { return gcsDocumentsBuilder_.getMessage(); } return com.google.cloud.documentai.v1beta3.GcsDocuments.getDefaultInstance(); } } /** * * * <pre> * The set of documents individually specified on Cloud Storage. * </pre> * * <code>.google.cloud.documentai.v1beta3.GcsDocuments gcs_documents = 2;</code> */ public Builder setGcsDocuments(com.google.cloud.documentai.v1beta3.GcsDocuments value) { if (gcsDocumentsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } source_ = value; onChanged(); } else { gcsDocumentsBuilder_.setMessage(value); } sourceCase_ = 2; return this; } /** * * * <pre> * The set of documents individually specified on Cloud Storage. * </pre> * * <code>.google.cloud.documentai.v1beta3.GcsDocuments gcs_documents = 2;</code> */ public Builder setGcsDocuments( com.google.cloud.documentai.v1beta3.GcsDocuments.Builder builderForValue) { if (gcsDocumentsBuilder_ == null) { source_ = builderForValue.build(); onChanged(); } else { gcsDocumentsBuilder_.setMessage(builderForValue.build()); } sourceCase_ = 2; return this; } /** * * * <pre> * The set of documents individually specified on Cloud Storage. 
* </pre> * * <code>.google.cloud.documentai.v1beta3.GcsDocuments gcs_documents = 2;</code> */ public Builder mergeGcsDocuments(com.google.cloud.documentai.v1beta3.GcsDocuments value) { if (gcsDocumentsBuilder_ == null) { if (sourceCase_ == 2 && source_ != com.google.cloud.documentai.v1beta3.GcsDocuments.getDefaultInstance()) { source_ = com.google.cloud.documentai.v1beta3.GcsDocuments.newBuilder( (com.google.cloud.documentai.v1beta3.GcsDocuments) source_) .mergeFrom(value) .buildPartial(); } else { source_ = value; } onChanged(); } else { if (sourceCase_ == 2) { gcsDocumentsBuilder_.mergeFrom(value); } else { gcsDocumentsBuilder_.setMessage(value); } } sourceCase_ = 2; return this; } /** * * * <pre> * The set of documents individually specified on Cloud Storage. * </pre> * * <code>.google.cloud.documentai.v1beta3.GcsDocuments gcs_documents = 2;</code> */ public Builder clearGcsDocuments() { if (gcsDocumentsBuilder_ == null) { if (sourceCase_ == 2) { sourceCase_ = 0; source_ = null; onChanged(); } } else { if (sourceCase_ == 2) { sourceCase_ = 0; source_ = null; } gcsDocumentsBuilder_.clear(); } return this; } /** * * * <pre> * The set of documents individually specified on Cloud Storage. * </pre> * * <code>.google.cloud.documentai.v1beta3.GcsDocuments gcs_documents = 2;</code> */ public com.google.cloud.documentai.v1beta3.GcsDocuments.Builder getGcsDocumentsBuilder() { return getGcsDocumentsFieldBuilder().getBuilder(); } /** * * * <pre> * The set of documents individually specified on Cloud Storage. 
* </pre> * * <code>.google.cloud.documentai.v1beta3.GcsDocuments gcs_documents = 2;</code> */ @java.lang.Override public com.google.cloud.documentai.v1beta3.GcsDocumentsOrBuilder getGcsDocumentsOrBuilder() { if ((sourceCase_ == 2) && (gcsDocumentsBuilder_ != null)) { return gcsDocumentsBuilder_.getMessageOrBuilder(); } else { if (sourceCase_ == 2) { return (com.google.cloud.documentai.v1beta3.GcsDocuments) source_; } return com.google.cloud.documentai.v1beta3.GcsDocuments.getDefaultInstance(); } } /** * * * <pre> * The set of documents individually specified on Cloud Storage. * </pre> * * <code>.google.cloud.documentai.v1beta3.GcsDocuments gcs_documents = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.documentai.v1beta3.GcsDocuments, com.google.cloud.documentai.v1beta3.GcsDocuments.Builder, com.google.cloud.documentai.v1beta3.GcsDocumentsOrBuilder> getGcsDocumentsFieldBuilder() { if (gcsDocumentsBuilder_ == null) { if (!(sourceCase_ == 2)) { source_ = com.google.cloud.documentai.v1beta3.GcsDocuments.getDefaultInstance(); } gcsDocumentsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.documentai.v1beta3.GcsDocuments, com.google.cloud.documentai.v1beta3.GcsDocuments.Builder, com.google.cloud.documentai.v1beta3.GcsDocumentsOrBuilder>( (com.google.cloud.documentai.v1beta3.GcsDocuments) source_, getParentForChildren(), isClean()); source_ = null; } sourceCase_ = 2; onChanged(); return gcsDocumentsBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.documentai.v1beta3.BatchDocumentsInputConfig) } // 
@@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta3.BatchDocumentsInputConfig) private static final com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig(); } public static com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<BatchDocumentsInputConfig> PARSER = new com.google.protobuf.AbstractParser<BatchDocumentsInputConfig>() { @java.lang.Override public BatchDocumentsInputConfig parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<BatchDocumentsInputConfig> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<BatchDocumentsInputConfig> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.documentai.v1beta3.BatchDocumentsInputConfig getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,752
java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/UpdateScheduleRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1/schedule_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1; /** * * * <pre> * Request message for * [ScheduleService.UpdateSchedule][google.cloud.aiplatform.v1.ScheduleService.UpdateSchedule]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1.UpdateScheduleRequest} */ public final class UpdateScheduleRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.UpdateScheduleRequest) UpdateScheduleRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateScheduleRequest.newBuilder() to construct. 
private UpdateScheduleRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateScheduleRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateScheduleRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.ScheduleServiceProto .internal_static_google_cloud_aiplatform_v1_UpdateScheduleRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.ScheduleServiceProto .internal_static_google_cloud_aiplatform_v1_UpdateScheduleRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.UpdateScheduleRequest.class, com.google.cloud.aiplatform.v1.UpdateScheduleRequest.Builder.class); } private int bitField0_; public static final int SCHEDULE_FIELD_NUMBER = 1; private com.google.cloud.aiplatform.v1.Schedule schedule_; /** * * * <pre> * Required. The Schedule which replaces the resource on the server. * The following restrictions will be applied: * * * The scheduled request type cannot be changed. * * The non-empty fields cannot be unset. * * The output_only fields will be ignored if specified. * </pre> * * <code> * .google.cloud.aiplatform.v1.Schedule schedule = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the schedule field is set. */ @java.lang.Override public boolean hasSchedule() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The Schedule which replaces the resource on the server. * The following restrictions will be applied: * * * The scheduled request type cannot be changed. * * The non-empty fields cannot be unset. * * The output_only fields will be ignored if specified. 
* </pre> * * <code> * .google.cloud.aiplatform.v1.Schedule schedule = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The schedule. */ @java.lang.Override public com.google.cloud.aiplatform.v1.Schedule getSchedule() { return schedule_ == null ? com.google.cloud.aiplatform.v1.Schedule.getDefaultInstance() : schedule_; } /** * * * <pre> * Required. The Schedule which replaces the resource on the server. * The following restrictions will be applied: * * * The scheduled request type cannot be changed. * * The non-empty fields cannot be unset. * * The output_only fields will be ignored if specified. * </pre> * * <code> * .google.cloud.aiplatform.v1.Schedule schedule = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.aiplatform.v1.ScheduleOrBuilder getScheduleOrBuilder() { return schedule_ == null ? com.google.cloud.aiplatform.v1.Schedule.getDefaultInstance() : schedule_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Required. The update mask applies to the resource. See * [google.protobuf.FieldMask][google.protobuf.FieldMask]. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The update mask applies to the resource. See * [google.protobuf.FieldMask][google.protobuf.FieldMask]. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Required. The update mask applies to the resource. 
See * [google.protobuf.FieldMask][google.protobuf.FieldMask]. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getSchedule()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getSchedule()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1.UpdateScheduleRequest)) { return super.equals(obj); } com.google.cloud.aiplatform.v1.UpdateScheduleRequest other = (com.google.cloud.aiplatform.v1.UpdateScheduleRequest) obj; if (hasSchedule() != other.hasSchedule()) return false; if (hasSchedule()) { if (!getSchedule().equals(other.getSchedule())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if 
(!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasSchedule()) { hash = (37 * hash) + SCHEDULE_FIELD_NUMBER; hash = (53 * hash) + getSchedule().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1.UpdateScheduleRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.UpdateScheduleRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.UpdateScheduleRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.UpdateScheduleRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.UpdateScheduleRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.UpdateScheduleRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.UpdateScheduleRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.UpdateScheduleRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.UpdateScheduleRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.UpdateScheduleRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.UpdateScheduleRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.UpdateScheduleRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.aiplatform.v1.UpdateScheduleRequest prototype) { return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for * [ScheduleService.UpdateSchedule][google.cloud.aiplatform.v1.ScheduleService.UpdateSchedule]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1.UpdateScheduleRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.UpdateScheduleRequest) com.google.cloud.aiplatform.v1.UpdateScheduleRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.ScheduleServiceProto .internal_static_google_cloud_aiplatform_v1_UpdateScheduleRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.ScheduleServiceProto .internal_static_google_cloud_aiplatform_v1_UpdateScheduleRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.UpdateScheduleRequest.class, com.google.cloud.aiplatform.v1.UpdateScheduleRequest.Builder.class); } // Construct using com.google.cloud.aiplatform.v1.UpdateScheduleRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getScheduleFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { 
super.clear(); bitField0_ = 0; schedule_ = null; if (scheduleBuilder_ != null) { scheduleBuilder_.dispose(); scheduleBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1.ScheduleServiceProto .internal_static_google_cloud_aiplatform_v1_UpdateScheduleRequest_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1.UpdateScheduleRequest getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1.UpdateScheduleRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1.UpdateScheduleRequest build() { com.google.cloud.aiplatform.v1.UpdateScheduleRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1.UpdateScheduleRequest buildPartial() { com.google.cloud.aiplatform.v1.UpdateScheduleRequest result = new com.google.cloud.aiplatform.v1.UpdateScheduleRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.aiplatform.v1.UpdateScheduleRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.schedule_ = scheduleBuilder_ == null ? schedule_ : scheduleBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1.UpdateScheduleRequest) { return mergeFrom((com.google.cloud.aiplatform.v1.UpdateScheduleRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.aiplatform.v1.UpdateScheduleRequest other) { if (other == com.google.cloud.aiplatform.v1.UpdateScheduleRequest.getDefaultInstance()) return this; if (other.hasSchedule()) { mergeSchedule(other.getSchedule()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new 
java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getScheduleFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.aiplatform.v1.Schedule schedule_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1.Schedule, com.google.cloud.aiplatform.v1.Schedule.Builder, com.google.cloud.aiplatform.v1.ScheduleOrBuilder> scheduleBuilder_; /** * * * <pre> * Required. The Schedule which replaces the resource on the server. * The following restrictions will be applied: * * * The scheduled request type cannot be changed. * * The non-empty fields cannot be unset. * * The output_only fields will be ignored if specified. * </pre> * * <code> * .google.cloud.aiplatform.v1.Schedule schedule = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the schedule field is set. */ public boolean hasSchedule() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The Schedule which replaces the resource on the server. * The following restrictions will be applied: * * * The scheduled request type cannot be changed. * * The non-empty fields cannot be unset. * * The output_only fields will be ignored if specified. * </pre> * * <code> * .google.cloud.aiplatform.v1.Schedule schedule = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The schedule. 
*/ public com.google.cloud.aiplatform.v1.Schedule getSchedule() { if (scheduleBuilder_ == null) { return schedule_ == null ? com.google.cloud.aiplatform.v1.Schedule.getDefaultInstance() : schedule_; } else { return scheduleBuilder_.getMessage(); } } /** * * * <pre> * Required. The Schedule which replaces the resource on the server. * The following restrictions will be applied: * * * The scheduled request type cannot be changed. * * The non-empty fields cannot be unset. * * The output_only fields will be ignored if specified. * </pre> * * <code> * .google.cloud.aiplatform.v1.Schedule schedule = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setSchedule(com.google.cloud.aiplatform.v1.Schedule value) { if (scheduleBuilder_ == null) { if (value == null) { throw new NullPointerException(); } schedule_ = value; } else { scheduleBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The Schedule which replaces the resource on the server. * The following restrictions will be applied: * * * The scheduled request type cannot be changed. * * The non-empty fields cannot be unset. * * The output_only fields will be ignored if specified. * </pre> * * <code> * .google.cloud.aiplatform.v1.Schedule schedule = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setSchedule(com.google.cloud.aiplatform.v1.Schedule.Builder builderForValue) { if (scheduleBuilder_ == null) { schedule_ = builderForValue.build(); } else { scheduleBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The Schedule which replaces the resource on the server. * The following restrictions will be applied: * * * The scheduled request type cannot be changed. * * The non-empty fields cannot be unset. * * The output_only fields will be ignored if specified. 
* </pre> * * <code> * .google.cloud.aiplatform.v1.Schedule schedule = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeSchedule(com.google.cloud.aiplatform.v1.Schedule value) { if (scheduleBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && schedule_ != null && schedule_ != com.google.cloud.aiplatform.v1.Schedule.getDefaultInstance()) { getScheduleBuilder().mergeFrom(value); } else { schedule_ = value; } } else { scheduleBuilder_.mergeFrom(value); } if (schedule_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The Schedule which replaces the resource on the server. * The following restrictions will be applied: * * * The scheduled request type cannot be changed. * * The non-empty fields cannot be unset. * * The output_only fields will be ignored if specified. * </pre> * * <code> * .google.cloud.aiplatform.v1.Schedule schedule = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearSchedule() { bitField0_ = (bitField0_ & ~0x00000001); schedule_ = null; if (scheduleBuilder_ != null) { scheduleBuilder_.dispose(); scheduleBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The Schedule which replaces the resource on the server. * The following restrictions will be applied: * * * The scheduled request type cannot be changed. * * The non-empty fields cannot be unset. * * The output_only fields will be ignored if specified. * </pre> * * <code> * .google.cloud.aiplatform.v1.Schedule schedule = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.aiplatform.v1.Schedule.Builder getScheduleBuilder() { bitField0_ |= 0x00000001; onChanged(); return getScheduleFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The Schedule which replaces the resource on the server. * The following restrictions will be applied: * * * The scheduled request type cannot be changed. * * The non-empty fields cannot be unset. 
* * The output_only fields will be ignored if specified. * </pre> * * <code> * .google.cloud.aiplatform.v1.Schedule schedule = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.aiplatform.v1.ScheduleOrBuilder getScheduleOrBuilder() { if (scheduleBuilder_ != null) { return scheduleBuilder_.getMessageOrBuilder(); } else { return schedule_ == null ? com.google.cloud.aiplatform.v1.Schedule.getDefaultInstance() : schedule_; } } /** * * * <pre> * Required. The Schedule which replaces the resource on the server. * The following restrictions will be applied: * * * The scheduled request type cannot be changed. * * The non-empty fields cannot be unset. * * The output_only fields will be ignored if specified. * </pre> * * <code> * .google.cloud.aiplatform.v1.Schedule schedule = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1.Schedule, com.google.cloud.aiplatform.v1.Schedule.Builder, com.google.cloud.aiplatform.v1.ScheduleOrBuilder> getScheduleFieldBuilder() { if (scheduleBuilder_ == null) { scheduleBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1.Schedule, com.google.cloud.aiplatform.v1.Schedule.Builder, com.google.cloud.aiplatform.v1.ScheduleOrBuilder>( getSchedule(), getParentForChildren(), isClean()); schedule_ = null; } return scheduleBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Required. The update mask applies to the resource. See * [google.protobuf.FieldMask][google.protobuf.FieldMask]. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. 
*/ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The update mask applies to the resource. See * [google.protobuf.FieldMask][google.protobuf.FieldMask]. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Required. The update mask applies to the resource. See * [google.protobuf.FieldMask][google.protobuf.FieldMask]. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The update mask applies to the resource. See * [google.protobuf.FieldMask][google.protobuf.FieldMask]. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The update mask applies to the resource. See * [google.protobuf.FieldMask][google.protobuf.FieldMask]. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. The update mask applies to the resource. See * [google.protobuf.FieldMask][google.protobuf.FieldMask]. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The update mask applies to the resource. See * [google.protobuf.FieldMask][google.protobuf.FieldMask]. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The update mask applies to the resource. See * [google.protobuf.FieldMask][google.protobuf.FieldMask]. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Required. 
The update mask applies to the resource. See * [google.protobuf.FieldMask][google.protobuf.FieldMask]. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.UpdateScheduleRequest) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.UpdateScheduleRequest) private static final com.google.cloud.aiplatform.v1.UpdateScheduleRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.UpdateScheduleRequest(); } public static com.google.cloud.aiplatform.v1.UpdateScheduleRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateScheduleRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateScheduleRequest>() { @java.lang.Override public UpdateScheduleRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, 
extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateScheduleRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateScheduleRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1.UpdateScheduleRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/ignite
36,470
modules/core/src/test/java/org/apache/ignite/internal/processors/cache/datastructures/GridCacheSetAbstractSelfTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.datastructures; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.Iterator; import java.util.Map; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Stream; import org.apache.ignite.Ignite; import org.apache.ignite.IgniteCache; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteCluster; import org.apache.ignite.IgniteException; import org.apache.ignite.IgniteSet; import org.apache.ignite.cache.CacheAtomicityMode; import org.apache.ignite.cache.CacheMode; import org.apache.ignite.cache.CachePeekMode; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.CollectionConfiguration; import org.apache.ignite.internal.IgniteEx; import org.apache.ignite.internal.IgniteInternalFuture; import org.apache.ignite.internal.IgniteKernal; import org.apache.ignite.internal.processors.cache.GridCacheAdapter; import 
org.apache.ignite.internal.processors.cache.GridCacheContext; import org.apache.ignite.internal.processors.cache.IgniteInternalCache; import org.apache.ignite.internal.processors.cache.query.GridCacheQueryManager; import org.apache.ignite.internal.util.lang.GridAbsPredicate; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.lang.IgniteCallable; import org.apache.ignite.lang.IgniteRunnable; import org.apache.ignite.resources.IgniteInstanceResource; import org.apache.ignite.testframework.GridTestUtils; import org.apache.ignite.transactions.Transaction; import org.junit.Test; import static org.apache.ignite.cache.CacheAtomicityMode.ATOMIC; import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL; import static org.apache.ignite.cache.CacheMode.PARTITIONED; import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC; /** * Cache set tests. */ public abstract class GridCacheSetAbstractSelfTest extends IgniteCollectionAbstractTest { /** */ protected static final String SET_NAME = "testSet"; /** {@inheritDoc} */ @Override protected int gridCount() { return 4; } /** {@inheritDoc} */ @Override protected CollectionConfiguration collectionConfiguration() { CollectionConfiguration colCfg = super.collectionConfiguration(); if (colCfg.getCacheMode() == PARTITIONED) colCfg.setBackups(1); return colCfg; } /** {@inheritDoc} */ @Override protected void afterTest() throws Exception { IgniteSet<Object> set = grid(0).set(SET_NAME, null); if (set != null) set.close(); waitSetResourcesCleared(); assertNull(grid(0).set(SET_NAME, null)); super.afterTest(); } /** * Waits when internal set maps are cleared. * * @throws IgniteCheckedException If failed. 
     */
    @SuppressWarnings("ErrorNotRethrown")
    private void waitSetResourcesCleared() throws IgniteCheckedException {
        final int MAX_CHECK = 5;

        // Cleanup is asynchronous, so retry the assertion a few times before failing.
        for (int i = 0; i < MAX_CHECK; i++) {
            try {
                assertSetResourcesCleared();

                return;
            }
            catch (AssertionError e) {
                if (i == MAX_CHECK - 1)
                    throw e;

                log.info("Set resources not cleared, will wait more.");

                U.sleep(1000);
            }
        }
    }

    /**
     * Checks internal set maps are cleared.
     */
    private void assertSetResourcesCleared() {
        assertSetIteratorsCleared();

        for (int i = 0; i < gridCount(); i++) {
            IgniteKernal grid = (IgniteKernal)grid(i);

            for (IgniteInternalCache cache : grid.cachesx(null)) {
                CacheDataStructuresManager dsMgr = cache.context().dataStructures();

                // Reflectively inspect the private per-cache registry of live sets.
                Map map = GridTestUtils.getFieldValue(dsMgr, "setsMap");

                assertEquals("Set not removed [grid=" + i + ", map=" + map + ']', 0, map.size());
            }
        }
    }

    /**
     * Checks internal iterators maps are cleared.
     */
    private void assertSetIteratorsCleared() {
        for (int i = 0; i < gridCount(); i++) {
            IgniteKernal grid = (IgniteKernal)grid(i);

            for (IgniteCache cache : grid.caches()) {
                GridCacheQueryManager queries = grid.internalCache(cache.getName()).context().queries();

                // Reflectively inspect the private map of open query iterators.
                Map map = GridTestUtils.getFieldValue(queries, GridCacheQueryManager.class, "qryIters");

                for (Object obj : map.values())
                    assertEquals("Iterators not removed for grid " + i, 0, ((Map)obj).size());
            }
        }
    }

    /** {@inheritDoc} */
    @Override protected long getTestTimeout() {
        return 2 * 60 * 1000;
    }

    /**
     * @throws Exception If failed.
     */
    @Test
    public void testCreateRemove() throws Exception {
        testCreateRemove(false);
    }

    /**
     * @throws Exception If failed.
     */
    @Test
    public void testCreateRemoveCollocated() throws Exception {
        testCreateRemove(true);
    }

    /**
     * @param collocated Collocation flag.
     * @throws Exception If failed.
     */
    protected void testCreateRemove(boolean collocated) throws Exception {
        testCreateRemove(collocated, 0);
    }

    /**
     * @param collocated Collocation flag.
     * @param nodeIdx Index of the node from which to create set.
     * @throws Exception If failed.
     */
    protected void testCreateRemove(boolean collocated, int nodeIdx) throws Exception {
        for (int i = 0; i < gridCount(); i++)
            assertNull(grid(i).set(SET_NAME, null));

        CollectionConfiguration colCfg0 = config(collocated);

        IgniteSet<Integer> set0 = grid(nodeIdx).set(SET_NAME, colCfg0);

        assertNotNull(set0);

        // The same set must be visible (empty, with matching metadata) from every node.
        for (int i = 0; i < gridCount(); i++) {
            CollectionConfiguration colCfg = config(collocated);

            IgniteSet<Integer> set = grid(i).set(SET_NAME, colCfg);

            assertNotNull(set);
            assertTrue(set.isEmpty());
            assertEquals(0, set.size());

            assertEquals(SET_NAME, set.name());

            if (collectionCacheMode() == PARTITIONED)
                assertEquals(collocated, set.collocated());
        }

        set0.close();

        // Removal propagates asynchronously; wait until no node can resolve the set.
        GridTestUtils.waitForCondition(new GridAbsPredicate() {
            @Override public boolean apply() {
                try {
                    for (int i = 0; i < gridCount(); i++) {
                        if (grid(i).set(SET_NAME, null) != null)
                            return false;
                    }

                    return true;
                }
                catch (Exception e) {
                    fail("Unexpected exception: " + e);

                    return true;
                }
            }
        }, 1000);

        for (int i = 0; i < gridCount(); i++)
            assertNull(grid(i).set(SET_NAME, null));
    }

    /**
     * @throws Exception If failed.
     */
    @Test
    public void testApi() throws Exception {
        testApi(false);
    }

    /**
     * @throws Exception If failed.
     */
    @Test
    public void testApiCollocated() throws Exception {
        testApi(true);
    }

    /**
     * Exercises the {@link Set} contract (add/remove/contains/bulk ops/clear),
     * checking the observed state from every node after each mutation.
     *
     * @param collocated Collocation flag.
     * @throws Exception If failed.
     */
    private void testApi(boolean collocated) throws Exception {
        CollectionConfiguration colCfg = config(collocated);

        IgniteSet<Object> set0 = grid(0).set(SET_NAME, colCfg);

        assertNotNull(set0);

        for (int i = 0; i < gridCount(); i++) {
            Set<Integer> set = grid(i).set(SET_NAME, null);

            assertNotNull(set);
            assertFalse(set.contains(1));
            assertEquals(0, set.size());
            assertTrue(set.isEmpty());
        }

        // Add, isEmpty.
        assertTrue(set0.add(1));

        for (int i = 0; i < gridCount(); i++) {
            Set<Integer> set = grid(i).set(SET_NAME, null);

            assertEquals(1, set.size());
            assertFalse(set.isEmpty());
            assertTrue(set.contains(1));

            assertFalse(set.add(1));

            assertFalse(set.contains(100));
        }

        // Remove.
        assertTrue(set0.remove(1));

        for (int i = 0; i < gridCount(); i++) {
            Set<Integer> set = grid(i).set(SET_NAME, null);

            assertEquals(0, set.size());
            assertTrue(set.isEmpty());

            assertFalse(set.contains(1));

            assertFalse(set.remove(1));
        }

        // Contains all.
        Collection<Integer> col1 = new ArrayList<>();
        Collection<Integer> col2 = new ArrayList<>();

        final int ITEMS = 100;

        // Populate from all nodes round-robin. col2 gets one extra element (ITEMS)
        // that is never added to the set, so containsAll(col2) must be false.
        for (int i = 0; i < ITEMS; i++) {
            assertTrue(grid(i % gridCount()).set(SET_NAME, null).add(i));

            col1.add(i);
            col2.add(i);
        }

        col2.add(ITEMS);

        for (int i = 0; i < gridCount(); i++) {
            Set<Integer> set = grid(i).set(SET_NAME, null);

            assertEquals(ITEMS, set.size());
            assertTrue(set.containsAll(col1));
            assertFalse(set.containsAll(col2));
        }

        // To array.
        for (int i = 0; i < gridCount(); i++) {
            Set<Integer> set = grid(i).set(SET_NAME, null);

            assertArrayContent(set.toArray(), ITEMS);
            assertArrayContent(set.toArray(new Integer[ITEMS]), ITEMS);
        }

        // Remove all.
        Collection<Integer> rmvCol = new ArrayList<>();

        for (int i = ITEMS - 10; i < ITEMS; i++)
            rmvCol.add(i);

        assertTrue(set0.removeAll(rmvCol));

        for (int i = 0; i < gridCount(); i++) {
            Set<Integer> set = grid(i).set(SET_NAME, null);

            assertFalse(set.removeAll(rmvCol));

            for (Integer val : rmvCol)
                assertFalse(set.contains(val));

            assertArrayContent(set.toArray(), ITEMS - 10);
            assertArrayContent(set.toArray(new Integer[ITEMS - 10]), ITEMS - 10);
        }

        // Add all.
        assertTrue(set0.addAll(rmvCol));

        for (int i = 0; i < gridCount(); i++) {
            Set<Integer> set = grid(i).set(SET_NAME, null);

            assertEquals(ITEMS, set.size());

            assertFalse(set.addAll(rmvCol));

            for (Integer val : rmvCol)
                assertTrue(set.contains(val));
        }

        // Retain all.
        assertTrue(set0.retainAll(rmvCol));

        for (int i = 0; i < gridCount(); i++) {
            Set<Integer> set = grid(i).set(SET_NAME, null);

            assertEquals(rmvCol.size(), set.size());

            assertFalse(set.retainAll(rmvCol));

            for (int val = 0; val < 10; val++)
                assertFalse(set.contains(val));

            for (int val : rmvCol)
                assertTrue(set.contains(val));
        }

        assertFalse(set0.isEmpty());

        // retainAll with empty list: clear the collection and get a boolean value indicating if it was empty or not.
        assertTrue(set0.retainAll(new ArrayList<>()));

        assertTrue(set0.isEmpty());

        // Clear.
        set0.add(1);

        set0.clear();

        for (int i = 0; i < gridCount(); i++) {
            Set<Integer> set = grid(i).set(SET_NAME, null);

            assertEquals(0, set.size());
            assertTrue(set.isEmpty());
            assertFalse(set.contains(0));
        }
    }

    /**
     * @throws Exception If failed.
     */
    @Test
    public void testIterator() throws Exception {
        testIterator(false);
    }

    /**
     * @throws Exception If failed.
     */
    @Test
    public void testIteratorCollocated() throws Exception {
        testIterator(true);
    }

    /**
     * @param collocated Collocation flag.
     * @throws Exception If failed.
     */
    protected void testIterator(boolean collocated) throws Exception {
        testIterator(collocated, 0);
    }

    /**
     * @param collocated Collocation flag.
     * @param nodeIdx Index of the node from which to create set.
     * @throws Exception If failed.
     */
    protected void testIterator(boolean collocated, int nodeIdx) throws Exception {
        CollectionConfiguration colCfg = config(collocated);

        final IgniteSet<Integer> set0 = grid(nodeIdx).set(SET_NAME, colCfg);

        for (int i = 0; i < gridCount(); i++) {
            IgniteSet<Integer> set = grid(i).set(SET_NAME, null);

            assertFalse(set.iterator().hasNext());
        }

        int cnt = 0;

        // Each node adds 100 distinct values.
        for (int i = 0; i < gridCount(); i++) {
            Set<Integer> set = grid(i).set(SET_NAME, null);

            for (int j = 0; j < 100; j++)
                assertTrue(set.add(cnt++));
        }

        for (int i = 0; i < gridCount(); i++) {
            IgniteSet<Integer> set = grid(i).set(SET_NAME, null);

            assertSetContent(set, cnt);
        }

        // Try to do not use hasNext.
        Collection<Integer> data = new HashSet<>(cnt);

        Iterator<Integer> iter = set0.iterator();

        for (int i = 0; i < cnt; i++)
            assertTrue(data.add(iter.next()));

        assertFalse(iter.hasNext());

        assertEquals(cnt, data.size());

        for (int i = 0; i < cnt; i++)
            assertTrue(data.contains(i));

        // Iterator for empty set.
        set0.clear();

        for (int i = 0; i < gridCount(); i++) {
            IgniteSet<Integer> set = grid(i).set(SET_NAME, null);

            assertFalse(set.iterator().hasNext());
        }

        // Iterator.remove().
        for (int i = 0; i < 10; i++)
            assertTrue(set0.add(i));

        iter = set0.iterator();

        while (iter.hasNext()) {
            Integer val = iter.next();

            if (val % 2 == 0)
                iter.remove();
        }

        // Only odd values survive (checked for values in [0, gridCount())).
        for (int i = 0; i < gridCount(); i++) {
            Set<Integer> set = grid(i).set(SET_NAME, null);

            assertEquals(i % 2 != 0, set.contains(i));
        }
    }

    /**
     * @throws Exception If failed.
     */
    @Test
    public void testIteratorClose() throws Exception {
        testIteratorClose(false);
    }

    /**
     * @throws Exception If failed.
     */
    @Test
    public void testIteratorCloseCollocated() throws Exception {
        testIteratorClose(true);
    }

    /**
     * Verifies that abandoned iterators are eventually cleaned up after GC
     * and that closing the set closes its outstanding iterators.
     *
     * @param collocated Collocation flag.
     * @throws Exception If failed.
     */
    @SuppressWarnings("ErrorNotRethrown")
    private void testIteratorClose(boolean collocated) throws Exception {
        CollectionConfiguration colCfg = config(collocated);

        IgniteSet<Integer> set0 = grid(0).set(SET_NAME, colCfg);

        for (int i = 0; i < 5000; i++)
            assertTrue(set0.add(i));

        createIterators(set0);

        // Make the abandoned iterators collectable.
        System.gc();

        for (int i = 0; i < 10; i++) {
            try {
                set0.size(); // Trigger weak queue poll.

                assertSetIteratorsCleared();
            }
            catch (AssertionError e) {
                if (i == 9)
                    throw e;

                log.info("Set iterators not cleared, will wait");

                Thread.sleep(500);
            }
        }

        // Check iterators are closed on set remove.
        createIterators(set0);

        int idx = gridCount() > 1 ? 1 : 0;

        grid(idx).set(SET_NAME, null).close();

        for (int i = 0; i < 10; i++) {
            try {
                assertSetIteratorsCleared();
            }
            catch (AssertionError e) {
                if (i == 9)
                    throw e;

                log.info("Set iterators not cleared, will wait");

                Thread.sleep(500);
            }
        }
    }

    /**
     * @param set Set.
     */
    private void createIterators(IgniteSet<Integer> set) {
        // Start (and abandon) iterators without exhausting or closing them.
        for (int i = 0; i < 10; i++) {
            Iterator<Integer> iter = set.iterator();

            assertTrue(iter.hasNext());

            iter.next();

            assertTrue(iter.hasNext());
        }
    }

    /**
     * @throws Exception If failed.
     */
    @Test
    public void testNodeJoinsAndLeaves() throws Exception {
        testNodeJoinsAndLeaves(false);
    }

    /**
     * @throws Exception If failed.
     */
    @Test
    public void testNodeJoinsAndLeavesCollocated() throws Exception {
        testNodeJoinsAndLeaves(true);
    }

    /**
     * @param collocated Collocation flag.
     * @throws Exception If failed.
     */
    private void testNodeJoinsAndLeaves(boolean collocated) throws Exception {
        CollectionConfiguration colCfg = config(collocated);

        Set<Integer> set0 = grid(0).set(SET_NAME, colCfg);

        final int ITEMS = 10_000;

        for (int i = 0; i < ITEMS; i++)
            set0.add(i);

        startGrid(gridCount());

        awaitPartitionMapExchange();

        try {
            IgniteSet<Integer> set1 = grid(0).set(SET_NAME, null);

            assertNotNull(set1);

            // Verify content from all nodes, including the newly joined one.
            for (int i = 0; i < gridCount() + 1; i++) {
                IgniteSet<Integer> set = grid(i).set(SET_NAME, null);

                assertEquals(ITEMS, set.size());

                assertSetContent(set, ITEMS);
            }
        }
        finally {
            stopGrid(gridCount());
        }

        // Stopping a node will cause data loss with zero backups.
        if (colCfg.getBackups() != 0) {
            for (int i = 0; i < gridCount(); i++) {
                IgniteSet<Integer> set = grid(i).set(SET_NAME, null);

                assertSetContent(set, ITEMS);
            }
        }
    }

    /**
     * @throws Exception If failed.
     */
    @Test
    public void testMultithreaded() throws Exception {
        testMultithreaded(false);
    }

    /**
     * @throws Exception If failed.
     */
    @Test
    public void testMultithreadedCollocated() throws Exception {
        if (collectionCacheMode() != PARTITIONED)
            return;

        testMultithreaded(true);
    }

    /**
     * @param collocated Collocation flag.
     * @throws Exception If failed.
     */
    private void testMultithreaded(final boolean collocated) throws Exception {
        CollectionConfiguration colCfg = config(collocated);

        Set<Integer> set0 = grid(0).set(SET_NAME, colCfg);

        assertNotNull(set0);

        Collection<IgniteInternalFuture> futs = new ArrayList<>();

        final int THREADS_PER_NODE = 5;
        final int KEY_RANGE = 10_000;
        final int ITERATIONS = GridTestUtils.SF.applyLB(2000, 100);

        // Hammer the same set concurrently from every node with random operations.
        for (int i = 0; i < gridCount(); i++) {
            final int idx = i;

            futs.add(GridTestUtils.runMultiThreadedAsync(new Callable<Void>() {
                @Override public Void call() throws Exception {
                    IgniteSet<Integer> set = grid(idx).set(SET_NAME, null);

                    assertNotNull(set);

                    ThreadLocalRandom rnd = ThreadLocalRandom.current();

                    for (int i = 0; i < ITERATIONS; i++) {
                        switch (rnd.nextInt(4)) {
                            case 0:
                                set.add(rnd.nextInt(KEY_RANGE));

                                break;

                            case 1:
                                set.remove(rnd.nextInt(KEY_RANGE));

                                break;

                            case 2:
                                set.contains(rnd.nextInt(KEY_RANGE));

                                break;

                            case 3:
                                // Full iteration concurrent with mutations.
                                for (Integer val : set)
                                    assertNotNull(val);

                                break;

                            default:
                                fail();
                        }

                        if ((i + 1) % 500 == 0)
                            log.info("Executed iterations: " + (i + 1));
                    }

                    return null;
                }
            }, THREADS_PER_NODE, "testSetMultithreaded"));
        }

        for (IgniteInternalFuture fut : futs)
            fut.get();
    }

    /**
     * @throws Exception If failed.
     */
    @Test
    public void testCleanup() throws Exception {
        testCleanup(false);
    }

    /**
     * @throws Exception If failed.
     */
    @Test
    public void testCleanupCollocated() throws Exception {
        testCleanup(true);
    }

    /**
     * @param collocated Collocation flag.
     * @throws Exception If failed.
     */
    private void testCleanup(boolean collocated) throws Exception {
        CollectionConfiguration colCfg = config(collocated);

        final IgniteSet<Integer> set0 = grid(0).set(SET_NAME, colCfg);

        assertNotNull(set0);

        final Collection<Set<Integer>> sets = new ArrayList<>();

        for (int i = 0; i < gridCount(); i++) {
            IgniteSet<Integer> set = grid(i).set(SET_NAME, null);

            assertNotNull(set);

            sets.add(set);
        }

        Collection<Integer> items = new ArrayList<>(10_000);

        for (int i = 0; i < 10_000; i++)
            items.add(i);

        set0.addAll(items);

        assertEquals(10_000, set0.size());

        final AtomicBoolean stop = new AtomicBoolean();

        final AtomicInteger val = new AtomicInteger(10_000);

        IgniteInternalFuture<?> fut;

        try {
            // Keep adding from all nodes while the set is closed concurrently;
            // writers racing with close() legitimately get IllegalStateException.
            fut = GridTestUtils.runMultiThreadedAsync(new Callable<Object>() {
                @Override public Object call() throws Exception {
                    try {
                        while (!stop.get()) {
                            for (Set<Integer> set : sets)
                                set.add(val.incrementAndGet());
                        }
                    }
                    catch (IllegalStateException e) {
                        log.info("Set removed: " + e);
                    }

                    return null;
                }
            }, 5, "set-add-thread");

            set0.close();
        }
        finally {
            stop.set(true);
        }

        fut.get();

        int cnt = 0;

        GridCacheContext cctx = GridTestUtils.getFieldValue(set0, "cctx");

        boolean separated = separated(set0);

        if (separated)
            awaitPartitionMapExchange();

        // After close: either the dedicated cache is destroyed (separated mode),
        // or the shared cache must hold no leftover entries of this set.
        for (int i = 0; i < gridCount(); i++) {
            GridCacheAdapter cache = grid(i).context().cache().internalCache(cctx.name());

            if (separated) {
                assertNull("Cache " + cctx.name() + " was not destroyed.", cache);

                continue;
            }

            for (Object e : cache.localEntries(new CachePeekMode[]{CachePeekMode.ALL})) {
                cnt++;

                log.info("Unexpected entry: " + e);
            }
        }

        assertEquals("Found unexpected cache entries", 0, cnt);

        // Every handle to the removed set must now reject mutations.
        for (final Set<Integer> set : sets) {
            GridTestUtils.assertThrows(log, new Callable<Void>() {
                @Override public Void call() throws Exception {
                    set.add(10);

                    return null;
                }
            }, IllegalStateException.class, null);
        }
    }

    /**
     * @throws Exception If failed.
     */
    @Test
    public void testSerialization() throws Exception {
        // The set reference is serialized into the broadcast closure and must
        // resolve to the same distributed set on every remote node.
        final IgniteSet<Integer> set = grid(0).set(SET_NAME, config(false));

        assertNotNull(set);

        for (int i = 0; i < 10; i++)
            set.add(i);

        IgniteCluster cluster = grid(0).cluster();

        Collection<Integer> c = grid(0).compute(cluster).broadcast(new IgniteCallable<Integer>() {
            @Override public Integer call() throws Exception {
                assertEquals(SET_NAME, set.name());

                return set.size();
            }
        });

        assertEquals(gridCount(), c.size());

        for (Integer size : c)
            assertEquals((Integer)10, size);
    }

    /**
     * @throws Exception If failed.
     */
    @Test
    public void testAffinityRun() throws Exception {
        final CollectionConfiguration colCfg = collectionConfiguration();

        colCfg.setCollocated(false);
        colCfg.setCacheMode(CacheMode.PARTITIONED);
        colCfg.setGroupName("testGroup");

        // affinityRun() is only supported for collocated sets.
        try (final IgniteSet<Integer> set1 = grid(0).set("Set1", colCfg)) {
            GridTestUtils.assertThrows(
                log,
                new Callable<Void>() {
                    @Override public Void call() throws Exception {
                        set1.affinityRun(new IgniteRunnable() {
                            @Override public void run() {
                                // No-op.
                            }
                        });

                        return null;
                    }
                },
                IgniteException.class,
                "Failed to execute affinityRun() for non-collocated set: " + set1.name() +
                    ". This operation is supported only for collocated sets.");
        }

        colCfg.setCollocated(true);

        try (final IgniteSet<Integer> set2 = grid(0).set("Set2", colCfg)) {
            set2.add(100);

            final String cacheName = cctx(set2).name();

            set2.affinityRun(new IgniteRunnable() {
                @IgniteInstanceResource
                private IgniteEx ignite;

                @Override public void run() {
                    // The closure must execute on a node owning the set's affinity key.
                    assertTrue(ignite.cachex(cacheName).affinity().isPrimaryOrBackup(
                        ignite.cluster().localNode(), "Set2"));

                    assertEquals(100, set2.iterator().next().intValue());
                }
            });
        }
    }

    /**
     * @throws Exception If failed.
     */
    @Test
    public void testAffinityCall() throws Exception {
        final CollectionConfiguration colCfg = collectionConfiguration();

        colCfg.setCollocated(false);
        colCfg.setCacheMode(CacheMode.PARTITIONED);
        colCfg.setGroupName("testGroup");

        // affinityCall() is only supported for collocated sets.
        try (final IgniteSet<Integer> set1 = grid(0).set("Set1", colCfg)) {
            GridTestUtils.assertThrows(
                log,
                new Callable<Void>() {
                    @Override public Void call() throws Exception {
                        set1.affinityCall(new IgniteCallable<Object>() {
                            @Override public Object call() {
                                return null;
                            }
                        });

                        return null;
                    }
                },
                IgniteException.class,
                "Failed to execute affinityCall() for non-collocated set: " + set1.name() +
                    ". This operation is supported only for collocated sets.");
        }

        colCfg.setCollocated(true);

        try (final IgniteSet<Integer> set2 = grid(0).set("Set2", colCfg)) {
            set2.add(100);

            final String cacheName = cctx(set2).name();

            Integer res = set2.affinityCall(new IgniteCallable<Integer>() {
                @IgniteInstanceResource
                private IgniteEx ignite;

                @Override public Integer call() {
                    // The closure must execute on a node owning the set's affinity key.
                    assertTrue(ignite.cachex(cacheName).affinity().isPrimaryOrBackup(
                        ignite.cluster().localNode(), "Set2"));

                    return set2.iterator().next();
                }
            });

            assertEquals(100, res.intValue());
        }
    }

    /**
     * Implementation of ignite data structures internally uses special system caches, need make sure
     * that transaction on these system caches do not intersect with transactions started by user.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testIsolation() throws Exception {
        CollectionConfiguration colCfg = collectionConfiguration();

        Ignite ignite = grid(0);

        CacheConfiguration cfg = new CacheConfiguration(DEFAULT_CACHE_NAME);

        cfg.setName("myCache");
        cfg.setAtomicityMode(TRANSACTIONAL);
        cfg.setWriteSynchronizationMode(FULL_SYNC);

        IgniteCache<Integer, Integer> cache = ignite.getOrCreateCache(cfg);

        try {
            IgniteSet<Integer> set0 = ignite.set(SET_NAME, colCfg);

            assertNotNull(set0);

            try (Transaction tx = ignite.transactions().txStart()) {
                cache.put(1, 1);

                Collection<Integer> items = new ArrayList<>(100);

                for (int i = 0; i < 100; i++)
                    items.add(i);

                set0.addAll(items);

                tx.rollback();
            }

            // The user transaction rolled back, but set operations run on the
            // internal system cache and must be unaffected by that rollback.
            assertEquals(0, cache.size());

            assertEquals(100, set0.size());

            set0.close();
        }
        finally {
            ignite.destroyCache(cfg.getName());
        }
    }

    /**
     * Test that non collocated sets are stored in a separated cache.
     */
    @Test
    public void testCacheReuse() {
        testCacheReuse(false);
    }

    /**
     * Test that collocated sets within the same group and compatible configurations are stored in the same cache.
     */
    @Test
    public void testCacheReuseCollocated() {
        testCacheReuse(true);
    }

    /**
     * @param collocated Collocation flag.
     */
    private void testCacheReuse(boolean collocated) {
        Ignite ignite = grid(0);

        CollectionConfiguration colCfg = collectionConfiguration().setCollocated(collocated);

        colCfg.setAtomicityMode(ATOMIC);
        colCfg.setGroupName("grp1");

        IgniteSet set1 = ignite.set("set1", colCfg);
        IgniteSet set2 = ignite.set("set2", colCfg);

        // Non-collocated (separated) sets get a dedicated cache each;
        // collocated sets with equal configs share one cache.
        assertEquals(separated(set1), cctx(set1).cacheId() != cctx(set2).cacheId());

        colCfg.setAtomicityMode(TRANSACTIONAL);

        IgniteSet set3 = ignite.set("set3", colCfg);
        IgniteSet set4 = ignite.set("set4", colCfg);

        assertEquals(separated(set3), cctx(set3).cacheId() != cctx(set4).cacheId());

        // A different atomicity mode forces a different cache within the same group.
        assertTrue(cctx(set1).cacheId() != cctx(set3).cacheId());
        assertTrue(cctx(set1).groupId() == cctx(set3).groupId());

        // NOTE(review): "gtp2" looks like a typo for "grp2", but any name distinct
        // from "grp1" satisfies the group-separation assertion below.
        colCfg.setGroupName("gtp2");

        IgniteSet set5 = ignite.set("set5", colCfg);
        IgniteSet set6 = ignite.set("set6", colCfg);

        assertEquals(separated(set5), cctx(set5).cacheId() != cctx(set6).cacheId());

        assertTrue(cctx(set1).groupId() != cctx(set5).groupId());

        Stream.of(set1, set2, set3, set4, set5, set6).forEach(IgniteSet::close);
    }

    /**
     * Tests that basic API works correctly when there are multiple structures in multiple groups.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testMultipleStructuresInDifferentGroups() throws Exception {
        Ignite ignite = grid(0);

        CollectionConfiguration cfg1 = collectionConfiguration();
        CollectionConfiguration cfg2 = collectionConfiguration().setGroupName("grp2");

        IgniteSet<String> set1 = ignite.set("set1", cfg1);
        IgniteSet<String> set2 = ignite.set("set2", cfg1);
        IgniteSet<String> set3 = ignite.set("set3", cfg2);
        IgniteSet<String> set4 = ignite.set("set4", cfg2);

        assertTrue(set1.add("a"));
        assertTrue(set2.add("b"));
        assertTrue(set3.add("c"));
        assertTrue(set4.add("d"));

        // Re-adding the same element is a no-op for each independent set.
        assertFalse(set1.add("a"));
        assertFalse(set2.add("b"));
        assertFalse(set3.add("c"));
        assertFalse(set4.add("d"));

        assertTrue(set1.contains("a"));
        assertTrue(set2.contains("b"));
        assertTrue(set3.contains("c"));
        assertTrue(set4.contains("d"));

        assertEquals(1, set1.size());
        assertEquals(1, set2.size());
        assertEquals(1, set3.size());
        assertEquals(1, set4.size());

        assertFalse(set1.remove("z"));
        assertFalse(set2.remove("z"));
        assertFalse(set3.remove("z"));
        assertFalse(set4.remove("z"));

        assertTrue(set1.remove("a"));
        assertTrue(set2.remove("b"));
        assertTrue(set3.remove("c"));
        assertTrue(set4.remove("d"));

        assertTrue(set1.isEmpty());
        assertTrue(set2.isEmpty());
        assertTrue(set3.isEmpty());
        assertTrue(set4.isEmpty());

        // Closing one set per group must not affect its siblings.
        set2.close();
        set4.close();

        assertTrue(set2.removed());
        assertTrue(set4.removed());

        assertFalse(set1.removed());
        assertFalse(set3.removed());

        assertNotNull(ignite.set("set1", null));
        assertNull(ignite.set("set2", null));

        set1.close();
        set3.close();
    }

    /**
     * Tests that new set with the same name as an old removed set does not contain old data.
     */
    @Test
    @SuppressWarnings("ThrowableNotThrown")
    public void testCloseAndCreateWithSameName() {
        Ignite ignite = grid(0);

        CollectionConfiguration cfg = collectionConfiguration();

        IgniteSet<Integer> oldSet = ignite.set("testRemoveAndCreateWithSameName", cfg);
        IgniteSet<Integer> oldSet2 = ignite.set(oldSet.name(), null);

        oldSet.add(1);

        oldSet.close();

        // A re-created set with the same name must start empty.
        IgniteSet<Integer> newSet = ignite.set(oldSet.name(), cfg);

        assertEquals(0, newSet.size());

        // Both handles to the old instance are invalidated.
        assertTrue(oldSet.removed());
        assertTrue(oldSet2.removed());

        String msg = "Set has been removed from cache";

        GridTestUtils.assertThrows(null, oldSet::size, IllegalStateException.class, msg);
        GridTestUtils.assertThrows(null, oldSet2::size, IllegalStateException.class, msg);

        newSet.close();
    }

    /**
     * Tests multiple sets with the same name but different cache options.
     */
    @Test
    public void testSameNameDifferentOptions() {
        Ignite ignite = grid(0);

        String name = "testSameNameDifferentOptions";

        CollectionConfiguration cfg1 = new CollectionConfiguration()
            .setGroupName("gp1");

        CollectionConfiguration cfg2 = new CollectionConfiguration()
            .setGroupName("gp1")
            .setAtomicityMode(CacheAtomicityMode.TRANSACTIONAL);

        IgniteSet<Integer> set1 = ignite.set(name, cfg1);
        IgniteSet<Integer> set1_1 = ignite.set(name, cfg1);

        IgniteSet<Integer> set2 = ignite.set(name, cfg2);
        IgniteSet<Integer> set2_2 = ignite.set(name, cfg2);

        set1.add(1);

        // Same name + same config resolves to the same set; a different
        // atomicity mode yields an independent (still empty) set.
        assertEquals(1, set1.size());
        assertEquals(1, set1_1.size());

        assertEquals(0, set2.size());
        assertEquals(0, set2_2.size());

        set1.close();
        set2.close();
    }

    /**
     * @param set Set.
     * @param size Expected size.
     */
    private void assertSetContent(IgniteSet<Integer> set, int size) {
        Collection<Integer> data = new HashSet<>(size);

        // Iteration must yield exactly the values [0, size) with no duplicates.
        for (Integer val : set)
            assertTrue(data.add(val));

        assertEquals(size, data.size());

        for (int val = 0; val < size; val++)
            assertTrue(data.contains(val));
    }

    /**
     * @param arr Array.
     * @param size Expected size.
*/ private void assertArrayContent(Object[] arr, int size) { assertEquals(size, arr.length); for (int i = 0; i < size; i++) { boolean found = false; for (Object obj : arr) { if (obj.equals(i)) { found = true; break; } } assertTrue(found); } } }