repo_id
stringclasses
875 values
size
int64
974
38.9k
file_path
stringlengths
10
308
content
stringlengths
974
38.9k
googleapis/google-cloud-java
37,104
java-cloudsupport/proto-google-cloud-cloudsupport-v2/src/main/java/com/google/cloud/support/v2/ListAttachmentsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/support/v2/attachment_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.support.v2; /** * * * <pre> * The response message for the ListAttachments endpoint. * </pre> * * Protobuf type {@code google.cloud.support.v2.ListAttachmentsResponse} */ public final class ListAttachmentsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.support.v2.ListAttachmentsResponse) ListAttachmentsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListAttachmentsResponse.newBuilder() to construct. 
private ListAttachmentsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListAttachmentsResponse() { attachments_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListAttachmentsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.support.v2.AttachmentServiceProto .internal_static_google_cloud_support_v2_ListAttachmentsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.support.v2.AttachmentServiceProto .internal_static_google_cloud_support_v2_ListAttachmentsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.support.v2.ListAttachmentsResponse.class, com.google.cloud.support.v2.ListAttachmentsResponse.Builder.class); } public static final int ATTACHMENTS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.support.v2.Attachment> attachments_; /** * * * <pre> * The list of attachments associated with a case. * </pre> * * <code>repeated .google.cloud.support.v2.Attachment attachments = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.support.v2.Attachment> getAttachmentsList() { return attachments_; } /** * * * <pre> * The list of attachments associated with a case. * </pre> * * <code>repeated .google.cloud.support.v2.Attachment attachments = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.support.v2.AttachmentOrBuilder> getAttachmentsOrBuilderList() { return attachments_; } /** * * * <pre> * The list of attachments associated with a case. 
* </pre> * * <code>repeated .google.cloud.support.v2.Attachment attachments = 1;</code> */ @java.lang.Override public int getAttachmentsCount() { return attachments_.size(); } /** * * * <pre> * The list of attachments associated with a case. * </pre> * * <code>repeated .google.cloud.support.v2.Attachment attachments = 1;</code> */ @java.lang.Override public com.google.cloud.support.v2.Attachment getAttachments(int index) { return attachments_.get(index); } /** * * * <pre> * The list of attachments associated with a case. * </pre> * * <code>repeated .google.cloud.support.v2.Attachment attachments = 1;</code> */ @java.lang.Override public com.google.cloud.support.v2.AttachmentOrBuilder getAttachmentsOrBuilder(int index) { return attachments_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to retrieve the next page of results. Set this in the `page_token` * field of subsequent `cases.attachments.list` requests. If unspecified, * there are no more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token to retrieve the next page of results. Set this in the `page_token` * field of subsequent `cases.attachments.list` requests. If unspecified, * there are no more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < attachments_.size(); i++) { output.writeMessage(1, attachments_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < attachments_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, attachments_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.support.v2.ListAttachmentsResponse)) { return super.equals(obj); } com.google.cloud.support.v2.ListAttachmentsResponse other = (com.google.cloud.support.v2.ListAttachmentsResponse) obj; if (!getAttachmentsList().equals(other.getAttachmentsList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return 
false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getAttachmentsCount() > 0) { hash = (37 * hash) + ATTACHMENTS_FIELD_NUMBER; hash = (53 * hash) + getAttachmentsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.support.v2.ListAttachmentsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.support.v2.ListAttachmentsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.support.v2.ListAttachmentsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.support.v2.ListAttachmentsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.support.v2.ListAttachmentsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.support.v2.ListAttachmentsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } 
public static com.google.cloud.support.v2.ListAttachmentsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.support.v2.ListAttachmentsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.support.v2.ListAttachmentsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.support.v2.ListAttachmentsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.support.v2.ListAttachmentsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.support.v2.ListAttachmentsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.support.v2.ListAttachmentsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == 
DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The response message for the ListAttachments endpoint. * </pre> * * Protobuf type {@code google.cloud.support.v2.ListAttachmentsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.support.v2.ListAttachmentsResponse) com.google.cloud.support.v2.ListAttachmentsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.support.v2.AttachmentServiceProto .internal_static_google_cloud_support_v2_ListAttachmentsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.support.v2.AttachmentServiceProto .internal_static_google_cloud_support_v2_ListAttachmentsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.support.v2.ListAttachmentsResponse.class, com.google.cloud.support.v2.ListAttachmentsResponse.Builder.class); } // Construct using com.google.cloud.support.v2.ListAttachmentsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (attachmentsBuilder_ == null) { attachments_ = java.util.Collections.emptyList(); } else { attachments_ = null; attachmentsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.support.v2.AttachmentServiceProto 
.internal_static_google_cloud_support_v2_ListAttachmentsResponse_descriptor; } @java.lang.Override public com.google.cloud.support.v2.ListAttachmentsResponse getDefaultInstanceForType() { return com.google.cloud.support.v2.ListAttachmentsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.support.v2.ListAttachmentsResponse build() { com.google.cloud.support.v2.ListAttachmentsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.support.v2.ListAttachmentsResponse buildPartial() { com.google.cloud.support.v2.ListAttachmentsResponse result = new com.google.cloud.support.v2.ListAttachmentsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.support.v2.ListAttachmentsResponse result) { if (attachmentsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { attachments_ = java.util.Collections.unmodifiableList(attachments_); bitField0_ = (bitField0_ & ~0x00000001); } result.attachments_ = attachments_; } else { result.attachments_ = attachmentsBuilder_.build(); } } private void buildPartial0(com.google.cloud.support.v2.ListAttachmentsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override 
public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.support.v2.ListAttachmentsResponse) { return mergeFrom((com.google.cloud.support.v2.ListAttachmentsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.support.v2.ListAttachmentsResponse other) { if (other == com.google.cloud.support.v2.ListAttachmentsResponse.getDefaultInstance()) return this; if (attachmentsBuilder_ == null) { if (!other.attachments_.isEmpty()) { if (attachments_.isEmpty()) { attachments_ = other.attachments_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureAttachmentsIsMutable(); attachments_.addAll(other.attachments_); } onChanged(); } } else { if (!other.attachments_.isEmpty()) { if (attachmentsBuilder_.isEmpty()) { attachmentsBuilder_.dispose(); attachmentsBuilder_ = null; attachments_ = other.attachments_; bitField0_ = (bitField0_ & ~0x00000001); attachmentsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getAttachmentsFieldBuilder() : null; } else { attachmentsBuilder_.addAllMessages(other.attachments_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.support.v2.Attachment m = input.readMessage( com.google.cloud.support.v2.Attachment.parser(), extensionRegistry); if (attachmentsBuilder_ == null) { ensureAttachmentsIsMutable(); attachments_.add(m); } else { attachmentsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.support.v2.Attachment> attachments_ = java.util.Collections.emptyList(); private void ensureAttachmentsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { attachments_ = new java.util.ArrayList<com.google.cloud.support.v2.Attachment>(attachments_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.support.v2.Attachment, com.google.cloud.support.v2.Attachment.Builder, 
com.google.cloud.support.v2.AttachmentOrBuilder> attachmentsBuilder_; /** * * * <pre> * The list of attachments associated with a case. * </pre> * * <code>repeated .google.cloud.support.v2.Attachment attachments = 1;</code> */ public java.util.List<com.google.cloud.support.v2.Attachment> getAttachmentsList() { if (attachmentsBuilder_ == null) { return java.util.Collections.unmodifiableList(attachments_); } else { return attachmentsBuilder_.getMessageList(); } } /** * * * <pre> * The list of attachments associated with a case. * </pre> * * <code>repeated .google.cloud.support.v2.Attachment attachments = 1;</code> */ public int getAttachmentsCount() { if (attachmentsBuilder_ == null) { return attachments_.size(); } else { return attachmentsBuilder_.getCount(); } } /** * * * <pre> * The list of attachments associated with a case. * </pre> * * <code>repeated .google.cloud.support.v2.Attachment attachments = 1;</code> */ public com.google.cloud.support.v2.Attachment getAttachments(int index) { if (attachmentsBuilder_ == null) { return attachments_.get(index); } else { return attachmentsBuilder_.getMessage(index); } } /** * * * <pre> * The list of attachments associated with a case. * </pre> * * <code>repeated .google.cloud.support.v2.Attachment attachments = 1;</code> */ public Builder setAttachments(int index, com.google.cloud.support.v2.Attachment value) { if (attachmentsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAttachmentsIsMutable(); attachments_.set(index, value); onChanged(); } else { attachmentsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The list of attachments associated with a case. 
* </pre> * * <code>repeated .google.cloud.support.v2.Attachment attachments = 1;</code> */ public Builder setAttachments( int index, com.google.cloud.support.v2.Attachment.Builder builderForValue) { if (attachmentsBuilder_ == null) { ensureAttachmentsIsMutable(); attachments_.set(index, builderForValue.build()); onChanged(); } else { attachmentsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of attachments associated with a case. * </pre> * * <code>repeated .google.cloud.support.v2.Attachment attachments = 1;</code> */ public Builder addAttachments(com.google.cloud.support.v2.Attachment value) { if (attachmentsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAttachmentsIsMutable(); attachments_.add(value); onChanged(); } else { attachmentsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The list of attachments associated with a case. * </pre> * * <code>repeated .google.cloud.support.v2.Attachment attachments = 1;</code> */ public Builder addAttachments(int index, com.google.cloud.support.v2.Attachment value) { if (attachmentsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAttachmentsIsMutable(); attachments_.add(index, value); onChanged(); } else { attachmentsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The list of attachments associated with a case. * </pre> * * <code>repeated .google.cloud.support.v2.Attachment attachments = 1;</code> */ public Builder addAttachments(com.google.cloud.support.v2.Attachment.Builder builderForValue) { if (attachmentsBuilder_ == null) { ensureAttachmentsIsMutable(); attachments_.add(builderForValue.build()); onChanged(); } else { attachmentsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The list of attachments associated with a case. 
* </pre> * * <code>repeated .google.cloud.support.v2.Attachment attachments = 1;</code> */ public Builder addAttachments( int index, com.google.cloud.support.v2.Attachment.Builder builderForValue) { if (attachmentsBuilder_ == null) { ensureAttachmentsIsMutable(); attachments_.add(index, builderForValue.build()); onChanged(); } else { attachmentsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of attachments associated with a case. * </pre> * * <code>repeated .google.cloud.support.v2.Attachment attachments = 1;</code> */ public Builder addAllAttachments( java.lang.Iterable<? extends com.google.cloud.support.v2.Attachment> values) { if (attachmentsBuilder_ == null) { ensureAttachmentsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, attachments_); onChanged(); } else { attachmentsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The list of attachments associated with a case. * </pre> * * <code>repeated .google.cloud.support.v2.Attachment attachments = 1;</code> */ public Builder clearAttachments() { if (attachmentsBuilder_ == null) { attachments_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { attachmentsBuilder_.clear(); } return this; } /** * * * <pre> * The list of attachments associated with a case. * </pre> * * <code>repeated .google.cloud.support.v2.Attachment attachments = 1;</code> */ public Builder removeAttachments(int index) { if (attachmentsBuilder_ == null) { ensureAttachmentsIsMutable(); attachments_.remove(index); onChanged(); } else { attachmentsBuilder_.remove(index); } return this; } /** * * * <pre> * The list of attachments associated with a case. 
* </pre> * * <code>repeated .google.cloud.support.v2.Attachment attachments = 1;</code> */ public com.google.cloud.support.v2.Attachment.Builder getAttachmentsBuilder(int index) { return getAttachmentsFieldBuilder().getBuilder(index); } /** * * * <pre> * The list of attachments associated with a case. * </pre> * * <code>repeated .google.cloud.support.v2.Attachment attachments = 1;</code> */ public com.google.cloud.support.v2.AttachmentOrBuilder getAttachmentsOrBuilder(int index) { if (attachmentsBuilder_ == null) { return attachments_.get(index); } else { return attachmentsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The list of attachments associated with a case. * </pre> * * <code>repeated .google.cloud.support.v2.Attachment attachments = 1;</code> */ public java.util.List<? extends com.google.cloud.support.v2.AttachmentOrBuilder> getAttachmentsOrBuilderList() { if (attachmentsBuilder_ != null) { return attachmentsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(attachments_); } } /** * * * <pre> * The list of attachments associated with a case. * </pre> * * <code>repeated .google.cloud.support.v2.Attachment attachments = 1;</code> */ public com.google.cloud.support.v2.Attachment.Builder addAttachmentsBuilder() { return getAttachmentsFieldBuilder() .addBuilder(com.google.cloud.support.v2.Attachment.getDefaultInstance()); } /** * * * <pre> * The list of attachments associated with a case. * </pre> * * <code>repeated .google.cloud.support.v2.Attachment attachments = 1;</code> */ public com.google.cloud.support.v2.Attachment.Builder addAttachmentsBuilder(int index) { return getAttachmentsFieldBuilder() .addBuilder(index, com.google.cloud.support.v2.Attachment.getDefaultInstance()); } /** * * * <pre> * The list of attachments associated with a case. 
* </pre> * * <code>repeated .google.cloud.support.v2.Attachment attachments = 1;</code> */ public java.util.List<com.google.cloud.support.v2.Attachment.Builder> getAttachmentsBuilderList() { return getAttachmentsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.support.v2.Attachment, com.google.cloud.support.v2.Attachment.Builder, com.google.cloud.support.v2.AttachmentOrBuilder> getAttachmentsFieldBuilder() { if (attachmentsBuilder_ == null) { attachmentsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.support.v2.Attachment, com.google.cloud.support.v2.Attachment.Builder, com.google.cloud.support.v2.AttachmentOrBuilder>( attachments_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); attachments_ = null; } return attachmentsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to retrieve the next page of results. Set this in the `page_token` * field of subsequent `cases.attachments.list` requests. If unspecified, * there are no more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token to retrieve the next page of results. Set this in the `page_token` * field of subsequent `cases.attachments.list` requests. If unspecified, * there are no more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token to retrieve the next page of results. Set this in the `page_token` * field of subsequent `cases.attachments.list` requests. If unspecified, * there are no more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token to retrieve the next page of results. Set this in the `page_token` * field of subsequent `cases.attachments.list` requests. If unspecified, * there are no more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token to retrieve the next page of results. Set this in the `page_token` * field of subsequent `cases.attachments.list` requests. If unspecified, * there are no more results to retrieve. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.support.v2.ListAttachmentsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.support.v2.ListAttachmentsResponse) private static final com.google.cloud.support.v2.ListAttachmentsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.support.v2.ListAttachmentsResponse(); } public static com.google.cloud.support.v2.ListAttachmentsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListAttachmentsResponse> PARSER = new com.google.protobuf.AbstractParser<ListAttachmentsResponse>() { @java.lang.Override public ListAttachmentsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static 
com.google.protobuf.Parser<ListAttachmentsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListAttachmentsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.support.v2.ListAttachmentsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/hive
37,359
standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrincipalPrivilegeSet.java
/** * Autogenerated by Thrift Compiler (0.16.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hive.metastore.api; @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"}) @javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.16.0)") @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public class PrincipalPrivilegeSet implements org.apache.thrift.TBase<PrincipalPrivilegeSet, PrincipalPrivilegeSet._Fields>, java.io.Serializable, Cloneable, Comparable<PrincipalPrivilegeSet> { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("PrincipalPrivilegeSet"); private static final org.apache.thrift.protocol.TField USER_PRIVILEGES_FIELD_DESC = new org.apache.thrift.protocol.TField("userPrivileges", org.apache.thrift.protocol.TType.MAP, (short)1); private static final org.apache.thrift.protocol.TField GROUP_PRIVILEGES_FIELD_DESC = new org.apache.thrift.protocol.TField("groupPrivileges", org.apache.thrift.protocol.TType.MAP, (short)2); private static final org.apache.thrift.protocol.TField ROLE_PRIVILEGES_FIELD_DESC = new org.apache.thrift.protocol.TField("rolePrivileges", org.apache.thrift.protocol.TType.MAP, (short)3); private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new PrincipalPrivilegeSetStandardSchemeFactory(); private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new PrincipalPrivilegeSetTupleSchemeFactory(); private @org.apache.thrift.annotation.Nullable java.util.Map<java.lang.String,java.util.List<PrivilegeGrantInfo>> userPrivileges; // required private @org.apache.thrift.annotation.Nullable java.util.Map<java.lang.String,java.util.List<PrivilegeGrantInfo>> groupPrivileges; // required private @org.apache.thrift.annotation.Nullable 
java.util.Map<java.lang.String,java.util.List<PrivilegeGrantInfo>> rolePrivileges; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { USER_PRIVILEGES((short)1, "userPrivileges"), GROUP_PRIVILEGES((short)2, "groupPrivileges"), ROLE_PRIVILEGES((short)3, "rolePrivileges"); private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>(); static { for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if its not found. */ @org.apache.thrift.annotation.Nullable public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 1: // USER_PRIVILEGES return USER_PRIVILEGES; case 2: // GROUP_PRIVILEGES return GROUP_PRIVILEGES; case 3: // ROLE_PRIVILEGES return ROLE_PRIVILEGES; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. */ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if its not found. 
*/ @org.apache.thrift.annotation.Nullable public static _Fields findByName(java.lang.String name) { return byName.get(name); } private final short _thriftId; private final java.lang.String _fieldName; _Fields(short thriftId, java.lang.String fieldName) { _thriftId = thriftId; _fieldName = fieldName; } public short getThriftFieldId() { return _thriftId; } public java.lang.String getFieldName() { return _fieldName; } } // isset id assignments public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.USER_PRIVILEGES, new org.apache.thrift.meta_data.FieldMetaData("userPrivileges", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING), new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, PrivilegeGrantInfo.class))))); tmpMap.put(_Fields.GROUP_PRIVILEGES, new org.apache.thrift.meta_data.FieldMetaData("groupPrivileges", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING), new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, PrivilegeGrantInfo.class))))); tmpMap.put(_Fields.ROLE_PRIVILEGES, new org.apache.thrift.meta_data.FieldMetaData("rolePrivileges", org.apache.thrift.TFieldRequirementType.DEFAULT, new 
org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING), new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, PrivilegeGrantInfo.class))))); metaDataMap = java.util.Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(PrincipalPrivilegeSet.class, metaDataMap); } public PrincipalPrivilegeSet() { } public PrincipalPrivilegeSet( java.util.Map<java.lang.String,java.util.List<PrivilegeGrantInfo>> userPrivileges, java.util.Map<java.lang.String,java.util.List<PrivilegeGrantInfo>> groupPrivileges, java.util.Map<java.lang.String,java.util.List<PrivilegeGrantInfo>> rolePrivileges) { this(); this.userPrivileges = userPrivileges; this.groupPrivileges = groupPrivileges; this.rolePrivileges = rolePrivileges; } /** * Performs a deep copy on <i>other</i>. 
*/ public PrincipalPrivilegeSet(PrincipalPrivilegeSet other) { if (other.isSetUserPrivileges()) { java.util.Map<java.lang.String,java.util.List<PrivilegeGrantInfo>> __this__userPrivileges = new java.util.HashMap<java.lang.String,java.util.List<PrivilegeGrantInfo>>(other.userPrivileges.size()); for (java.util.Map.Entry<java.lang.String, java.util.List<PrivilegeGrantInfo>> other_element : other.userPrivileges.entrySet()) { java.lang.String other_element_key = other_element.getKey(); java.util.List<PrivilegeGrantInfo> other_element_value = other_element.getValue(); java.lang.String __this__userPrivileges_copy_key = other_element_key; java.util.List<PrivilegeGrantInfo> __this__userPrivileges_copy_value = new java.util.ArrayList<PrivilegeGrantInfo>(other_element_value.size()); for (PrivilegeGrantInfo other_element_value_element : other_element_value) { __this__userPrivileges_copy_value.add(new PrivilegeGrantInfo(other_element_value_element)); } __this__userPrivileges.put(__this__userPrivileges_copy_key, __this__userPrivileges_copy_value); } this.userPrivileges = __this__userPrivileges; } if (other.isSetGroupPrivileges()) { java.util.Map<java.lang.String,java.util.List<PrivilegeGrantInfo>> __this__groupPrivileges = new java.util.HashMap<java.lang.String,java.util.List<PrivilegeGrantInfo>>(other.groupPrivileges.size()); for (java.util.Map.Entry<java.lang.String, java.util.List<PrivilegeGrantInfo>> other_element : other.groupPrivileges.entrySet()) { java.lang.String other_element_key = other_element.getKey(); java.util.List<PrivilegeGrantInfo> other_element_value = other_element.getValue(); java.lang.String __this__groupPrivileges_copy_key = other_element_key; java.util.List<PrivilegeGrantInfo> __this__groupPrivileges_copy_value = new java.util.ArrayList<PrivilegeGrantInfo>(other_element_value.size()); for (PrivilegeGrantInfo other_element_value_element : other_element_value) { __this__groupPrivileges_copy_value.add(new PrivilegeGrantInfo(other_element_value_element)); } 
__this__groupPrivileges.put(__this__groupPrivileges_copy_key, __this__groupPrivileges_copy_value); } this.groupPrivileges = __this__groupPrivileges; } if (other.isSetRolePrivileges()) { java.util.Map<java.lang.String,java.util.List<PrivilegeGrantInfo>> __this__rolePrivileges = new java.util.HashMap<java.lang.String,java.util.List<PrivilegeGrantInfo>>(other.rolePrivileges.size()); for (java.util.Map.Entry<java.lang.String, java.util.List<PrivilegeGrantInfo>> other_element : other.rolePrivileges.entrySet()) { java.lang.String other_element_key = other_element.getKey(); java.util.List<PrivilegeGrantInfo> other_element_value = other_element.getValue(); java.lang.String __this__rolePrivileges_copy_key = other_element_key; java.util.List<PrivilegeGrantInfo> __this__rolePrivileges_copy_value = new java.util.ArrayList<PrivilegeGrantInfo>(other_element_value.size()); for (PrivilegeGrantInfo other_element_value_element : other_element_value) { __this__rolePrivileges_copy_value.add(new PrivilegeGrantInfo(other_element_value_element)); } __this__rolePrivileges.put(__this__rolePrivileges_copy_key, __this__rolePrivileges_copy_value); } this.rolePrivileges = __this__rolePrivileges; } } public PrincipalPrivilegeSet deepCopy() { return new PrincipalPrivilegeSet(this); } @Override public void clear() { this.userPrivileges = null; this.groupPrivileges = null; this.rolePrivileges = null; } public int getUserPrivilegesSize() { return (this.userPrivileges == null) ? 
0 : this.userPrivileges.size(); } public void putToUserPrivileges(java.lang.String key, java.util.List<PrivilegeGrantInfo> val) { if (this.userPrivileges == null) { this.userPrivileges = new java.util.HashMap<java.lang.String,java.util.List<PrivilegeGrantInfo>>(); } this.userPrivileges.put(key, val); } @org.apache.thrift.annotation.Nullable public java.util.Map<java.lang.String,java.util.List<PrivilegeGrantInfo>> getUserPrivileges() { return this.userPrivileges; } public void setUserPrivileges(@org.apache.thrift.annotation.Nullable java.util.Map<java.lang.String,java.util.List<PrivilegeGrantInfo>> userPrivileges) { this.userPrivileges = userPrivileges; } public void unsetUserPrivileges() { this.userPrivileges = null; } /** Returns true if field userPrivileges is set (has been assigned a value) and false otherwise */ public boolean isSetUserPrivileges() { return this.userPrivileges != null; } public void setUserPrivilegesIsSet(boolean value) { if (!value) { this.userPrivileges = null; } } public int getGroupPrivilegesSize() { return (this.groupPrivileges == null) ? 
0 : this.groupPrivileges.size(); } public void putToGroupPrivileges(java.lang.String key, java.util.List<PrivilegeGrantInfo> val) { if (this.groupPrivileges == null) { this.groupPrivileges = new java.util.HashMap<java.lang.String,java.util.List<PrivilegeGrantInfo>>(); } this.groupPrivileges.put(key, val); } @org.apache.thrift.annotation.Nullable public java.util.Map<java.lang.String,java.util.List<PrivilegeGrantInfo>> getGroupPrivileges() { return this.groupPrivileges; } public void setGroupPrivileges(@org.apache.thrift.annotation.Nullable java.util.Map<java.lang.String,java.util.List<PrivilegeGrantInfo>> groupPrivileges) { this.groupPrivileges = groupPrivileges; } public void unsetGroupPrivileges() { this.groupPrivileges = null; } /** Returns true if field groupPrivileges is set (has been assigned a value) and false otherwise */ public boolean isSetGroupPrivileges() { return this.groupPrivileges != null; } public void setGroupPrivilegesIsSet(boolean value) { if (!value) { this.groupPrivileges = null; } } public int getRolePrivilegesSize() { return (this.rolePrivileges == null) ? 
0 : this.rolePrivileges.size(); } public void putToRolePrivileges(java.lang.String key, java.util.List<PrivilegeGrantInfo> val) { if (this.rolePrivileges == null) { this.rolePrivileges = new java.util.HashMap<java.lang.String,java.util.List<PrivilegeGrantInfo>>(); } this.rolePrivileges.put(key, val); } @org.apache.thrift.annotation.Nullable public java.util.Map<java.lang.String,java.util.List<PrivilegeGrantInfo>> getRolePrivileges() { return this.rolePrivileges; } public void setRolePrivileges(@org.apache.thrift.annotation.Nullable java.util.Map<java.lang.String,java.util.List<PrivilegeGrantInfo>> rolePrivileges) { this.rolePrivileges = rolePrivileges; } public void unsetRolePrivileges() { this.rolePrivileges = null; } /** Returns true if field rolePrivileges is set (has been assigned a value) and false otherwise */ public boolean isSetRolePrivileges() { return this.rolePrivileges != null; } public void setRolePrivilegesIsSet(boolean value) { if (!value) { this.rolePrivileges = null; } } public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) { switch (field) { case USER_PRIVILEGES: if (value == null) { unsetUserPrivileges(); } else { setUserPrivileges((java.util.Map<java.lang.String,java.util.List<PrivilegeGrantInfo>>)value); } break; case GROUP_PRIVILEGES: if (value == null) { unsetGroupPrivileges(); } else { setGroupPrivileges((java.util.Map<java.lang.String,java.util.List<PrivilegeGrantInfo>>)value); } break; case ROLE_PRIVILEGES: if (value == null) { unsetRolePrivileges(); } else { setRolePrivileges((java.util.Map<java.lang.String,java.util.List<PrivilegeGrantInfo>>)value); } break; } } @org.apache.thrift.annotation.Nullable public java.lang.Object getFieldValue(_Fields field) { switch (field) { case USER_PRIVILEGES: return getUserPrivileges(); case GROUP_PRIVILEGES: return getGroupPrivileges(); case ROLE_PRIVILEGES: return getRolePrivileges(); } throw new java.lang.IllegalStateException(); } /** Returns true if 
field corresponding to fieldID is set (has been assigned a value) and false otherwise */ public boolean isSet(_Fields field) { if (field == null) { throw new java.lang.IllegalArgumentException(); } switch (field) { case USER_PRIVILEGES: return isSetUserPrivileges(); case GROUP_PRIVILEGES: return isSetGroupPrivileges(); case ROLE_PRIVILEGES: return isSetRolePrivileges(); } throw new java.lang.IllegalStateException(); } @Override public boolean equals(java.lang.Object that) { if (that instanceof PrincipalPrivilegeSet) return this.equals((PrincipalPrivilegeSet)that); return false; } public boolean equals(PrincipalPrivilegeSet that) { if (that == null) return false; if (this == that) return true; boolean this_present_userPrivileges = true && this.isSetUserPrivileges(); boolean that_present_userPrivileges = true && that.isSetUserPrivileges(); if (this_present_userPrivileges || that_present_userPrivileges) { if (!(this_present_userPrivileges && that_present_userPrivileges)) return false; if (!this.userPrivileges.equals(that.userPrivileges)) return false; } boolean this_present_groupPrivileges = true && this.isSetGroupPrivileges(); boolean that_present_groupPrivileges = true && that.isSetGroupPrivileges(); if (this_present_groupPrivileges || that_present_groupPrivileges) { if (!(this_present_groupPrivileges && that_present_groupPrivileges)) return false; if (!this.groupPrivileges.equals(that.groupPrivileges)) return false; } boolean this_present_rolePrivileges = true && this.isSetRolePrivileges(); boolean that_present_rolePrivileges = true && that.isSetRolePrivileges(); if (this_present_rolePrivileges || that_present_rolePrivileges) { if (!(this_present_rolePrivileges && that_present_rolePrivileges)) return false; if (!this.rolePrivileges.equals(that.rolePrivileges)) return false; } return true; } @Override public int hashCode() { int hashCode = 1; hashCode = hashCode * 8191 + ((isSetUserPrivileges()) ? 
131071 : 524287); if (isSetUserPrivileges()) hashCode = hashCode * 8191 + userPrivileges.hashCode(); hashCode = hashCode * 8191 + ((isSetGroupPrivileges()) ? 131071 : 524287); if (isSetGroupPrivileges()) hashCode = hashCode * 8191 + groupPrivileges.hashCode(); hashCode = hashCode * 8191 + ((isSetRolePrivileges()) ? 131071 : 524287); if (isSetRolePrivileges()) hashCode = hashCode * 8191 + rolePrivileges.hashCode(); return hashCode; } @Override public int compareTo(PrincipalPrivilegeSet other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; lastComparison = java.lang.Boolean.compare(isSetUserPrivileges(), other.isSetUserPrivileges()); if (lastComparison != 0) { return lastComparison; } if (isSetUserPrivileges()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.userPrivileges, other.userPrivileges); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetGroupPrivileges(), other.isSetGroupPrivileges()); if (lastComparison != 0) { return lastComparison; } if (isSetGroupPrivileges()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.groupPrivileges, other.groupPrivileges); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetRolePrivileges(), other.isSetRolePrivileges()); if (lastComparison != 0) { return lastComparison; } if (isSetRolePrivileges()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.rolePrivileges, other.rolePrivileges); if (lastComparison != 0) { return lastComparison; } } return 0; } @org.apache.thrift.annotation.Nullable public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { scheme(iprot).read(iprot, this); } public void write(org.apache.thrift.protocol.TProtocol oprot) throws 
org.apache.thrift.TException { scheme(oprot).write(oprot, this); } @Override public java.lang.String toString() { java.lang.StringBuilder sb = new java.lang.StringBuilder("PrincipalPrivilegeSet("); boolean first = true; sb.append("userPrivileges:"); if (this.userPrivileges == null) { sb.append("null"); } else { sb.append(this.userPrivileges); } first = false; if (!first) sb.append(", "); sb.append("groupPrivileges:"); if (this.groupPrivileges == null) { sb.append("null"); } else { sb.append(this.groupPrivileges); } first = false; if (!first) sb.append(", "); sb.append("rolePrivileges:"); if (this.rolePrivileges == null) { sb.append("null"); } else { sb.append(this.rolePrivileges); } first = false; sb.append(")"); return sb.toString(); } public void validate() throws org.apache.thrift.TException { // check for required fields // check for sub-struct validity } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException { try { read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class PrincipalPrivilegeSetStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory { public PrincipalPrivilegeSetStandardScheme getScheme() { return new PrincipalPrivilegeSetStandardScheme(); } } private static class PrincipalPrivilegeSetStandardScheme extends org.apache.thrift.scheme.StandardScheme<PrincipalPrivilegeSet> { public void read(org.apache.thrift.protocol.TProtocol iprot, PrincipalPrivilegeSet struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField 
schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 1: // USER_PRIVILEGES if (schemeField.type == org.apache.thrift.protocol.TType.MAP) { { org.apache.thrift.protocol.TMap _map120 = iprot.readMapBegin(); struct.userPrivileges = new java.util.HashMap<java.lang.String,java.util.List<PrivilegeGrantInfo>>(2*_map120.size); @org.apache.thrift.annotation.Nullable java.lang.String _key121; @org.apache.thrift.annotation.Nullable java.util.List<PrivilegeGrantInfo> _val122; for (int _i123 = 0; _i123 < _map120.size; ++_i123) { _key121 = iprot.readString(); { org.apache.thrift.protocol.TList _list124 = iprot.readListBegin(); _val122 = new java.util.ArrayList<PrivilegeGrantInfo>(_list124.size); @org.apache.thrift.annotation.Nullable PrivilegeGrantInfo _elem125; for (int _i126 = 0; _i126 < _list124.size; ++_i126) { _elem125 = new PrivilegeGrantInfo(); _elem125.read(iprot); _val122.add(_elem125); } iprot.readListEnd(); } struct.userPrivileges.put(_key121, _val122); } iprot.readMapEnd(); } struct.setUserPrivilegesIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 2: // GROUP_PRIVILEGES if (schemeField.type == org.apache.thrift.protocol.TType.MAP) { { org.apache.thrift.protocol.TMap _map127 = iprot.readMapBegin(); struct.groupPrivileges = new java.util.HashMap<java.lang.String,java.util.List<PrivilegeGrantInfo>>(2*_map127.size); @org.apache.thrift.annotation.Nullable java.lang.String _key128; @org.apache.thrift.annotation.Nullable java.util.List<PrivilegeGrantInfo> _val129; for (int _i130 = 0; _i130 < _map127.size; ++_i130) { _key128 = iprot.readString(); { org.apache.thrift.protocol.TList _list131 = iprot.readListBegin(); _val129 = new java.util.ArrayList<PrivilegeGrantInfo>(_list131.size); @org.apache.thrift.annotation.Nullable PrivilegeGrantInfo _elem132; for (int _i133 = 0; _i133 < 
_list131.size; ++_i133) { _elem132 = new PrivilegeGrantInfo(); _elem132.read(iprot); _val129.add(_elem132); } iprot.readListEnd(); } struct.groupPrivileges.put(_key128, _val129); } iprot.readMapEnd(); } struct.setGroupPrivilegesIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 3: // ROLE_PRIVILEGES if (schemeField.type == org.apache.thrift.protocol.TType.MAP) { { org.apache.thrift.protocol.TMap _map134 = iprot.readMapBegin(); struct.rolePrivileges = new java.util.HashMap<java.lang.String,java.util.List<PrivilegeGrantInfo>>(2*_map134.size); @org.apache.thrift.annotation.Nullable java.lang.String _key135; @org.apache.thrift.annotation.Nullable java.util.List<PrivilegeGrantInfo> _val136; for (int _i137 = 0; _i137 < _map134.size; ++_i137) { _key135 = iprot.readString(); { org.apache.thrift.protocol.TList _list138 = iprot.readListBegin(); _val136 = new java.util.ArrayList<PrivilegeGrantInfo>(_list138.size); @org.apache.thrift.annotation.Nullable PrivilegeGrantInfo _elem139; for (int _i140 = 0; _i140 < _list138.size; ++_i140) { _elem139 = new PrivilegeGrantInfo(); _elem139.read(iprot); _val136.add(_elem139); } iprot.readListEnd(); } struct.rolePrivileges.put(_key135, _val136); } iprot.readMapEnd(); } struct.setRolePrivilegesIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); struct.validate(); } public void write(org.apache.thrift.protocol.TProtocol oprot, PrincipalPrivilegeSet struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); if (struct.userPrivileges != null) { oprot.writeFieldBegin(USER_PRIVILEGES_FIELD_DESC); { oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.LIST, struct.userPrivileges.size())); for 
(java.util.Map.Entry<java.lang.String, java.util.List<PrivilegeGrantInfo>> _iter141 : struct.userPrivileges.entrySet()) { oprot.writeString(_iter141.getKey()); { oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, _iter141.getValue().size())); for (PrivilegeGrantInfo _iter142 : _iter141.getValue()) { _iter142.write(oprot); } oprot.writeListEnd(); } } oprot.writeMapEnd(); } oprot.writeFieldEnd(); } if (struct.groupPrivileges != null) { oprot.writeFieldBegin(GROUP_PRIVILEGES_FIELD_DESC); { oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.LIST, struct.groupPrivileges.size())); for (java.util.Map.Entry<java.lang.String, java.util.List<PrivilegeGrantInfo>> _iter143 : struct.groupPrivileges.entrySet()) { oprot.writeString(_iter143.getKey()); { oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, _iter143.getValue().size())); for (PrivilegeGrantInfo _iter144 : _iter143.getValue()) { _iter144.write(oprot); } oprot.writeListEnd(); } } oprot.writeMapEnd(); } oprot.writeFieldEnd(); } if (struct.rolePrivileges != null) { oprot.writeFieldBegin(ROLE_PRIVILEGES_FIELD_DESC); { oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.LIST, struct.rolePrivileges.size())); for (java.util.Map.Entry<java.lang.String, java.util.List<PrivilegeGrantInfo>> _iter145 : struct.rolePrivileges.entrySet()) { oprot.writeString(_iter145.getKey()); { oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, _iter145.getValue().size())); for (PrivilegeGrantInfo _iter146 : _iter145.getValue()) { _iter146.write(oprot); } oprot.writeListEnd(); } } oprot.writeMapEnd(); } oprot.writeFieldEnd(); } oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class PrincipalPrivilegeSetTupleSchemeFactory implements 
org.apache.thrift.scheme.SchemeFactory { public PrincipalPrivilegeSetTupleScheme getScheme() { return new PrincipalPrivilegeSetTupleScheme(); } } private static class PrincipalPrivilegeSetTupleScheme extends org.apache.thrift.scheme.TupleScheme<PrincipalPrivilegeSet> { @Override public void write(org.apache.thrift.protocol.TProtocol prot, PrincipalPrivilegeSet struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot; java.util.BitSet optionals = new java.util.BitSet(); if (struct.isSetUserPrivileges()) { optionals.set(0); } if (struct.isSetGroupPrivileges()) { optionals.set(1); } if (struct.isSetRolePrivileges()) { optionals.set(2); } oprot.writeBitSet(optionals, 3); if (struct.isSetUserPrivileges()) { { oprot.writeI32(struct.userPrivileges.size()); for (java.util.Map.Entry<java.lang.String, java.util.List<PrivilegeGrantInfo>> _iter147 : struct.userPrivileges.entrySet()) { oprot.writeString(_iter147.getKey()); { oprot.writeI32(_iter147.getValue().size()); for (PrivilegeGrantInfo _iter148 : _iter147.getValue()) { _iter148.write(oprot); } } } } } if (struct.isSetGroupPrivileges()) { { oprot.writeI32(struct.groupPrivileges.size()); for (java.util.Map.Entry<java.lang.String, java.util.List<PrivilegeGrantInfo>> _iter149 : struct.groupPrivileges.entrySet()) { oprot.writeString(_iter149.getKey()); { oprot.writeI32(_iter149.getValue().size()); for (PrivilegeGrantInfo _iter150 : _iter149.getValue()) { _iter150.write(oprot); } } } } } if (struct.isSetRolePrivileges()) { { oprot.writeI32(struct.rolePrivileges.size()); for (java.util.Map.Entry<java.lang.String, java.util.List<PrivilegeGrantInfo>> _iter151 : struct.rolePrivileges.entrySet()) { oprot.writeString(_iter151.getKey()); { oprot.writeI32(_iter151.getValue().size()); for (PrivilegeGrantInfo _iter152 : _iter151.getValue()) { _iter152.write(oprot); } } } } } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, 
PrincipalPrivilegeSet struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot; java.util.BitSet incoming = iprot.readBitSet(3); if (incoming.get(0)) { { org.apache.thrift.protocol.TMap _map153 = iprot.readMapBegin(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.LIST); struct.userPrivileges = new java.util.HashMap<java.lang.String,java.util.List<PrivilegeGrantInfo>>(2*_map153.size); @org.apache.thrift.annotation.Nullable java.lang.String _key154; @org.apache.thrift.annotation.Nullable java.util.List<PrivilegeGrantInfo> _val155; for (int _i156 = 0; _i156 < _map153.size; ++_i156) { _key154 = iprot.readString(); { org.apache.thrift.protocol.TList _list157 = iprot.readListBegin(org.apache.thrift.protocol.TType.STRUCT); _val155 = new java.util.ArrayList<PrivilegeGrantInfo>(_list157.size); @org.apache.thrift.annotation.Nullable PrivilegeGrantInfo _elem158; for (int _i159 = 0; _i159 < _list157.size; ++_i159) { _elem158 = new PrivilegeGrantInfo(); _elem158.read(iprot); _val155.add(_elem158); } } struct.userPrivileges.put(_key154, _val155); } } struct.setUserPrivilegesIsSet(true); } if (incoming.get(1)) { { org.apache.thrift.protocol.TMap _map160 = iprot.readMapBegin(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.LIST); struct.groupPrivileges = new java.util.HashMap<java.lang.String,java.util.List<PrivilegeGrantInfo>>(2*_map160.size); @org.apache.thrift.annotation.Nullable java.lang.String _key161; @org.apache.thrift.annotation.Nullable java.util.List<PrivilegeGrantInfo> _val162; for (int _i163 = 0; _i163 < _map160.size; ++_i163) { _key161 = iprot.readString(); { org.apache.thrift.protocol.TList _list164 = iprot.readListBegin(org.apache.thrift.protocol.TType.STRUCT); _val162 = new java.util.ArrayList<PrivilegeGrantInfo>(_list164.size); @org.apache.thrift.annotation.Nullable PrivilegeGrantInfo _elem165; for (int _i166 = 0; _i166 < 
_list164.size; ++_i166) { _elem165 = new PrivilegeGrantInfo(); _elem165.read(iprot); _val162.add(_elem165); } } struct.groupPrivileges.put(_key161, _val162); } } struct.setGroupPrivilegesIsSet(true); } if (incoming.get(2)) { { org.apache.thrift.protocol.TMap _map167 = iprot.readMapBegin(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.LIST); struct.rolePrivileges = new java.util.HashMap<java.lang.String,java.util.List<PrivilegeGrantInfo>>(2*_map167.size); @org.apache.thrift.annotation.Nullable java.lang.String _key168; @org.apache.thrift.annotation.Nullable java.util.List<PrivilegeGrantInfo> _val169; for (int _i170 = 0; _i170 < _map167.size; ++_i170) { _key168 = iprot.readString(); { org.apache.thrift.protocol.TList _list171 = iprot.readListBegin(org.apache.thrift.protocol.TType.STRUCT); _val169 = new java.util.ArrayList<PrivilegeGrantInfo>(_list171.size); @org.apache.thrift.annotation.Nullable PrivilegeGrantInfo _elem172; for (int _i173 = 0; _i173 < _list171.size; ++_i173) { _elem172 = new PrivilegeGrantInfo(); _elem172.read(iprot); _val169.add(_elem172); } } struct.rolePrivileges.put(_key168, _val169); } } struct.setRolePrivilegesIsSet(true); } } } private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) { return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme(); } }
googleapis/google-cloud-java
37,048
java-container/proto-google-cloud-container-v1/src/main/java/com/google/container/v1/UsableSubnetworkSecondaryRange.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/container/v1/cluster_service.proto // Protobuf Java Version: 3.25.8 package com.google.container.v1; /** * * * <pre> * Secondary IP range of a usable subnetwork. * </pre> * * Protobuf type {@code google.container.v1.UsableSubnetworkSecondaryRange} */ public final class UsableSubnetworkSecondaryRange extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.container.v1.UsableSubnetworkSecondaryRange) UsableSubnetworkSecondaryRangeOrBuilder { private static final long serialVersionUID = 0L; // Use UsableSubnetworkSecondaryRange.newBuilder() to construct. 
private UsableSubnetworkSecondaryRange( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UsableSubnetworkSecondaryRange() { rangeName_ = ""; ipCidrRange_ = ""; status_ = 0; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UsableSubnetworkSecondaryRange(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.container.v1.ClusterServiceProto .internal_static_google_container_v1_UsableSubnetworkSecondaryRange_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.container.v1.ClusterServiceProto .internal_static_google_container_v1_UsableSubnetworkSecondaryRange_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.container.v1.UsableSubnetworkSecondaryRange.class, com.google.container.v1.UsableSubnetworkSecondaryRange.Builder.class); } /** * * * <pre> * Status shows the current usage of a secondary IP range. * </pre> * * Protobuf enum {@code google.container.v1.UsableSubnetworkSecondaryRange.Status} */ public enum Status implements com.google.protobuf.ProtocolMessageEnum { /** * * * <pre> * UNKNOWN is the zero value of the Status enum. It's not a valid status. * </pre> * * <code>UNKNOWN = 0;</code> */ UNKNOWN(0), /** * * * <pre> * UNUSED denotes that this range is unclaimed by any cluster. * </pre> * * <code>UNUSED = 1;</code> */ UNUSED(1), /** * * * <pre> * IN_USE_SERVICE denotes that this range is claimed by cluster(s) for * services. User-managed services range can be shared between clusters * within the same subnetwork. * </pre> * * <code>IN_USE_SERVICE = 2;</code> */ IN_USE_SERVICE(2), /** * * * <pre> * IN_USE_SHAREABLE_POD denotes this range was created by the network admin * and is currently claimed by a cluster for pods. It can only be used by * other clusters as a pod range. 
* </pre> * * <code>IN_USE_SHAREABLE_POD = 3;</code> */ IN_USE_SHAREABLE_POD(3), /** * * * <pre> * IN_USE_MANAGED_POD denotes this range was created by GKE and is claimed * for pods. It cannot be used for other clusters. * </pre> * * <code>IN_USE_MANAGED_POD = 4;</code> */ IN_USE_MANAGED_POD(4), UNRECOGNIZED(-1), ; /** * * * <pre> * UNKNOWN is the zero value of the Status enum. It's not a valid status. * </pre> * * <code>UNKNOWN = 0;</code> */ public static final int UNKNOWN_VALUE = 0; /** * * * <pre> * UNUSED denotes that this range is unclaimed by any cluster. * </pre> * * <code>UNUSED = 1;</code> */ public static final int UNUSED_VALUE = 1; /** * * * <pre> * IN_USE_SERVICE denotes that this range is claimed by cluster(s) for * services. User-managed services range can be shared between clusters * within the same subnetwork. * </pre> * * <code>IN_USE_SERVICE = 2;</code> */ public static final int IN_USE_SERVICE_VALUE = 2; /** * * * <pre> * IN_USE_SHAREABLE_POD denotes this range was created by the network admin * and is currently claimed by a cluster for pods. It can only be used by * other clusters as a pod range. * </pre> * * <code>IN_USE_SHAREABLE_POD = 3;</code> */ public static final int IN_USE_SHAREABLE_POD_VALUE = 3; /** * * * <pre> * IN_USE_MANAGED_POD denotes this range was created by GKE and is claimed * for pods. It cannot be used for other clusters. * </pre> * * <code>IN_USE_MANAGED_POD = 4;</code> */ public static final int IN_USE_MANAGED_POD_VALUE = 4; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. 
*/ @java.lang.Deprecated public static Status valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. */ public static Status forNumber(int value) { switch (value) { case 0: return UNKNOWN; case 1: return UNUSED; case 2: return IN_USE_SERVICE; case 3: return IN_USE_SHAREABLE_POD; case 4: return IN_USE_MANAGED_POD; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<Status> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap<Status> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<Status>() { public Status findValueByNumber(int number) { return Status.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.container.v1.UsableSubnetworkSecondaryRange.getDescriptor() .getEnumTypes() .get(0); } private static final Status[] VALUES = values(); public static Status valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private Status(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.container.v1.UsableSubnetworkSecondaryRange.Status) } public static final int RANGE_NAME_FIELD_NUMBER = 1; 
@SuppressWarnings("serial") private volatile java.lang.Object rangeName_ = ""; /** * * * <pre> * The name associated with this subnetwork secondary range, used when adding * an alias IP range to a VM instance. * </pre> * * <code>string range_name = 1;</code> * * @return The rangeName. */ @java.lang.Override public java.lang.String getRangeName() { java.lang.Object ref = rangeName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); rangeName_ = s; return s; } } /** * * * <pre> * The name associated with this subnetwork secondary range, used when adding * an alias IP range to a VM instance. * </pre> * * <code>string range_name = 1;</code> * * @return The bytes for rangeName. */ @java.lang.Override public com.google.protobuf.ByteString getRangeNameBytes() { java.lang.Object ref = rangeName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); rangeName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int IP_CIDR_RANGE_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object ipCidrRange_ = ""; /** * * * <pre> * The range of IP addresses belonging to this subnetwork secondary range. * </pre> * * <code>string ip_cidr_range = 2;</code> * * @return The ipCidrRange. */ @java.lang.Override public java.lang.String getIpCidrRange() { java.lang.Object ref = ipCidrRange_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); ipCidrRange_ = s; return s; } } /** * * * <pre> * The range of IP addresses belonging to this subnetwork secondary range. * </pre> * * <code>string ip_cidr_range = 2;</code> * * @return The bytes for ipCidrRange. 
*/ @java.lang.Override public com.google.protobuf.ByteString getIpCidrRangeBytes() { java.lang.Object ref = ipCidrRange_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); ipCidrRange_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int STATUS_FIELD_NUMBER = 3; private int status_ = 0; /** * * * <pre> * This field is to determine the status of the secondary range programmably. * </pre> * * <code>.google.container.v1.UsableSubnetworkSecondaryRange.Status status = 3;</code> * * @return The enum numeric value on the wire for status. */ @java.lang.Override public int getStatusValue() { return status_; } /** * * * <pre> * This field is to determine the status of the secondary range programmably. * </pre> * * <code>.google.container.v1.UsableSubnetworkSecondaryRange.Status status = 3;</code> * * @return The status. */ @java.lang.Override public com.google.container.v1.UsableSubnetworkSecondaryRange.Status getStatus() { com.google.container.v1.UsableSubnetworkSecondaryRange.Status result = com.google.container.v1.UsableSubnetworkSecondaryRange.Status.forNumber(status_); return result == null ? 
com.google.container.v1.UsableSubnetworkSecondaryRange.Status.UNRECOGNIZED : result; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(rangeName_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, rangeName_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(ipCidrRange_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, ipCidrRange_); } if (status_ != com.google.container.v1.UsableSubnetworkSecondaryRange.Status.UNKNOWN.getNumber()) { output.writeEnum(3, status_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(rangeName_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, rangeName_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(ipCidrRange_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, ipCidrRange_); } if (status_ != com.google.container.v1.UsableSubnetworkSecondaryRange.Status.UNKNOWN.getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(3, status_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.container.v1.UsableSubnetworkSecondaryRange)) { return super.equals(obj); } com.google.container.v1.UsableSubnetworkSecondaryRange other = (com.google.container.v1.UsableSubnetworkSecondaryRange) obj; if 
(!getRangeName().equals(other.getRangeName())) return false; if (!getIpCidrRange().equals(other.getIpCidrRange())) return false; if (status_ != other.status_) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + RANGE_NAME_FIELD_NUMBER; hash = (53 * hash) + getRangeName().hashCode(); hash = (37 * hash) + IP_CIDR_RANGE_FIELD_NUMBER; hash = (53 * hash) + getIpCidrRange().hashCode(); hash = (37 * hash) + STATUS_FIELD_NUMBER; hash = (53 * hash) + status_; hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.container.v1.UsableSubnetworkSecondaryRange parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.container.v1.UsableSubnetworkSecondaryRange parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.container.v1.UsableSubnetworkSecondaryRange parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.container.v1.UsableSubnetworkSecondaryRange parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.container.v1.UsableSubnetworkSecondaryRange parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.container.v1.UsableSubnetworkSecondaryRange parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.container.v1.UsableSubnetworkSecondaryRange parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.container.v1.UsableSubnetworkSecondaryRange parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.container.v1.UsableSubnetworkSecondaryRange parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.container.v1.UsableSubnetworkSecondaryRange parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.container.v1.UsableSubnetworkSecondaryRange parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.container.v1.UsableSubnetworkSecondaryRange parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder 
newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.container.v1.UsableSubnetworkSecondaryRange prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Secondary IP range of a usable subnetwork. * </pre> * * Protobuf type {@code google.container.v1.UsableSubnetworkSecondaryRange} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.container.v1.UsableSubnetworkSecondaryRange) com.google.container.v1.UsableSubnetworkSecondaryRangeOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.container.v1.ClusterServiceProto .internal_static_google_container_v1_UsableSubnetworkSecondaryRange_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.container.v1.ClusterServiceProto .internal_static_google_container_v1_UsableSubnetworkSecondaryRange_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.container.v1.UsableSubnetworkSecondaryRange.class, com.google.container.v1.UsableSubnetworkSecondaryRange.Builder.class); } // Construct using com.google.container.v1.UsableSubnetworkSecondaryRange.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; rangeName_ = ""; ipCidrRange_ = ""; status_ = 0; return this; } @java.lang.Override public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.container.v1.ClusterServiceProto .internal_static_google_container_v1_UsableSubnetworkSecondaryRange_descriptor; } @java.lang.Override public com.google.container.v1.UsableSubnetworkSecondaryRange getDefaultInstanceForType() { return com.google.container.v1.UsableSubnetworkSecondaryRange.getDefaultInstance(); } @java.lang.Override public com.google.container.v1.UsableSubnetworkSecondaryRange build() { com.google.container.v1.UsableSubnetworkSecondaryRange result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.container.v1.UsableSubnetworkSecondaryRange buildPartial() { com.google.container.v1.UsableSubnetworkSecondaryRange result = new com.google.container.v1.UsableSubnetworkSecondaryRange(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.container.v1.UsableSubnetworkSecondaryRange result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.rangeName_ = rangeName_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.ipCidrRange_ = ipCidrRange_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.status_ = status_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return 
super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.container.v1.UsableSubnetworkSecondaryRange) { return mergeFrom((com.google.container.v1.UsableSubnetworkSecondaryRange) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.container.v1.UsableSubnetworkSecondaryRange other) { if (other == com.google.container.v1.UsableSubnetworkSecondaryRange.getDefaultInstance()) return this; if (!other.getRangeName().isEmpty()) { rangeName_ = other.rangeName_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getIpCidrRange().isEmpty()) { ipCidrRange_ = other.ipCidrRange_; bitField0_ |= 0x00000002; onChanged(); } if (other.status_ != 0) { setStatusValue(other.getStatusValue()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { rangeName_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { ipCidrRange_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 24: { status_ = input.readEnum(); bitField0_ |= 0x00000004; break; } // case 24 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } 
catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object rangeName_ = ""; /** * * * <pre> * The name associated with this subnetwork secondary range, used when adding * an alias IP range to a VM instance. * </pre> * * <code>string range_name = 1;</code> * * @return The rangeName. */ public java.lang.String getRangeName() { java.lang.Object ref = rangeName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); rangeName_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The name associated with this subnetwork secondary range, used when adding * an alias IP range to a VM instance. * </pre> * * <code>string range_name = 1;</code> * * @return The bytes for rangeName. */ public com.google.protobuf.ByteString getRangeNameBytes() { java.lang.Object ref = rangeName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); rangeName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The name associated with this subnetwork secondary range, used when adding * an alias IP range to a VM instance. * </pre> * * <code>string range_name = 1;</code> * * @param value The rangeName to set. * @return This builder for chaining. */ public Builder setRangeName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } rangeName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The name associated with this subnetwork secondary range, used when adding * an alias IP range to a VM instance. * </pre> * * <code>string range_name = 1;</code> * * @return This builder for chaining. 
*/ public Builder clearRangeName() { rangeName_ = getDefaultInstance().getRangeName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * The name associated with this subnetwork secondary range, used when adding * an alias IP range to a VM instance. * </pre> * * <code>string range_name = 1;</code> * * @param value The bytes for rangeName to set. * @return This builder for chaining. */ public Builder setRangeNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); rangeName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object ipCidrRange_ = ""; /** * * * <pre> * The range of IP addresses belonging to this subnetwork secondary range. * </pre> * * <code>string ip_cidr_range = 2;</code> * * @return The ipCidrRange. */ public java.lang.String getIpCidrRange() { java.lang.Object ref = ipCidrRange_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); ipCidrRange_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The range of IP addresses belonging to this subnetwork secondary range. * </pre> * * <code>string ip_cidr_range = 2;</code> * * @return The bytes for ipCidrRange. */ public com.google.protobuf.ByteString getIpCidrRangeBytes() { java.lang.Object ref = ipCidrRange_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); ipCidrRange_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The range of IP addresses belonging to this subnetwork secondary range. * </pre> * * <code>string ip_cidr_range = 2;</code> * * @param value The ipCidrRange to set. * @return This builder for chaining. 
*/ public Builder setIpCidrRange(java.lang.String value) { if (value == null) { throw new NullPointerException(); } ipCidrRange_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The range of IP addresses belonging to this subnetwork secondary range. * </pre> * * <code>string ip_cidr_range = 2;</code> * * @return This builder for chaining. */ public Builder clearIpCidrRange() { ipCidrRange_ = getDefaultInstance().getIpCidrRange(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * The range of IP addresses belonging to this subnetwork secondary range. * </pre> * * <code>string ip_cidr_range = 2;</code> * * @param value The bytes for ipCidrRange to set. * @return This builder for chaining. */ public Builder setIpCidrRangeBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); ipCidrRange_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private int status_ = 0; /** * * * <pre> * This field is to determine the status of the secondary range programmably. * </pre> * * <code>.google.container.v1.UsableSubnetworkSecondaryRange.Status status = 3;</code> * * @return The enum numeric value on the wire for status. */ @java.lang.Override public int getStatusValue() { return status_; } /** * * * <pre> * This field is to determine the status of the secondary range programmably. * </pre> * * <code>.google.container.v1.UsableSubnetworkSecondaryRange.Status status = 3;</code> * * @param value The enum numeric value on the wire for status to set. * @return This builder for chaining. */ public Builder setStatusValue(int value) { status_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * This field is to determine the status of the secondary range programmably. * </pre> * * <code>.google.container.v1.UsableSubnetworkSecondaryRange.Status status = 3;</code> * * @return The status. 
*/ @java.lang.Override public com.google.container.v1.UsableSubnetworkSecondaryRange.Status getStatus() { com.google.container.v1.UsableSubnetworkSecondaryRange.Status result = com.google.container.v1.UsableSubnetworkSecondaryRange.Status.forNumber(status_); return result == null ? com.google.container.v1.UsableSubnetworkSecondaryRange.Status.UNRECOGNIZED : result; } /** * * * <pre> * This field is to determine the status of the secondary range programmably. * </pre> * * <code>.google.container.v1.UsableSubnetworkSecondaryRange.Status status = 3;</code> * * @param value The status to set. * @return This builder for chaining. */ public Builder setStatus(com.google.container.v1.UsableSubnetworkSecondaryRange.Status value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; status_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * This field is to determine the status of the secondary range programmably. * </pre> * * <code>.google.container.v1.UsableSubnetworkSecondaryRange.Status status = 3;</code> * * @return This builder for chaining. 
*/ public Builder clearStatus() { bitField0_ = (bitField0_ & ~0x00000004); status_ = 0; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.container.v1.UsableSubnetworkSecondaryRange) } // @@protoc_insertion_point(class_scope:google.container.v1.UsableSubnetworkSecondaryRange) private static final com.google.container.v1.UsableSubnetworkSecondaryRange DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.container.v1.UsableSubnetworkSecondaryRange(); } public static com.google.container.v1.UsableSubnetworkSecondaryRange getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UsableSubnetworkSecondaryRange> PARSER = new com.google.protobuf.AbstractParser<UsableSubnetworkSecondaryRange>() { @java.lang.Override public UsableSubnetworkSecondaryRange parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UsableSubnetworkSecondaryRange> parser() { return PARSER; } 
@java.lang.Override public com.google.protobuf.Parser<UsableSubnetworkSecondaryRange> getParserForType() { return PARSER; } @java.lang.Override public com.google.container.v1.UsableSubnetworkSecondaryRange getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/derby
36,197
java/org.apache.derby.tests/org/apache/derbyTesting/functionTests/tests/lang/CheckConstraintTest.java
/* Derby - Class org.apache.derbyTesting.functionTests.tests.lang.CheckConstraintTest Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.derbyTesting.functionTests.tests.lang; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Statement; import junit.framework.Test; import org.apache.derbyTesting.junit.BaseJDBCTestCase; import org.apache.derbyTesting.junit.BaseTestSuite; import org.apache.derbyTesting.junit.JDBC; import org.apache.derbyTesting.junit.SupportFilesSetup; import org.apache.derbyTesting.junit.TestConfiguration; public final class CheckConstraintTest extends BaseJDBCTestCase { // poached from GeneratedColumnsTest private static final String IMPORT_FILE_NAME = "t_bi_1.dat"; /** * Public constructor required for running test as standalone JUnit. 
*/
    /** Standard fixture constructor; the framework supplies the test name. */
    public CheckConstraintTest(String name) {
        super(name);
    }

    /**
     * Builds the default suite and wraps it in a {@code SupportFilesSetup}
     * so the import data file is available to the bulk-insert test.
     */
    public static Test suite() {
        BaseTestSuite suite = new BaseTestSuite("checkConstraint Test");
        suite.addTest(TestConfiguration.defaultSuite(CheckConstraintTest.class));
        return new SupportFilesSetup(
            suite,
            new String [] {
                "functionTests/tests/lang/" + IMPORT_FILE_NAME
            });
    }

    /**
     * Verifies constructs that are rejected inside a CHECK constraint:
     * dynamic parameters, subqueries and datetime functions (42Y39),
     * non-boolean constraint expressions (42X19), references to columns
     * of other tables (42X04), using a check constraint as an optimizer
     * override (42Y48), and forward column references (42621).
     */
    public void testNotAllowedInCheckConstraints() throws Exception {
        Statement st = createStatement();
        setAutoCommit(false);

        // negative: the following are not allowed in check
        // constraints: ?, subquery, datetime functions
        assertStatementError("42Y39", st,
            "create table neg1(c1 int check(?))");

        assertStatementError("42Y39", st,
            " create table neg1(c1 int check(c1 in (select c1 "
            + "from neg1)))");

        assertStatementError("42Y39", st,
            " create table neg1(c1 int check(CURRENT_DATE = "
            + "CURRENT_DATE))");

        assertStatementError("42Y39", st,
            " create table neg1(c1 int check(CURRENT_TIME = "
            + "CURRENT_TIME))");

        assertStatementError("42Y39", st,
            " create table neg1(c1 int check(CURRENT_TIMESTAMP = "
            + "CURRENT_TIMESTAMP))");

        // The check constraint definition must evaluate to a boolean
        assertStatementError("42X19", st,
            "create table neg1(c1 int check(c1))");

        assertStatementError("42X19", st,
            " create table neg1(c1 int check(1))");

        assertStatementError("42X19", st,
            " create table neg1(c1 int check(c1+c1))");

        // All column references are to target table
        assertStatementError("42X04", st,
            "create table neg1(c1 int check((c2 = 1)))");

        // verify that a check constraint can't be used as an
        // optimizer override
        st.executeUpdate(
            "create table t1(c1 int constraint asdf check(c1 = 1))");

        assertStatementError("42Y48", st,
            " select * from t1 --derby-properties constraint = asdf ");

        // alter table t1 drop constraint asdf
        rollback();

        // forward references should fail
        assertStatementError("42621", st,
            "create table neg1(c1 int check(c2 = 1), c2 int)");

        assertStatementError("42621", st,
            " create table neg2(c1 int constraint asdf check(c2 "
            + "= 1), c2 int)");

        rollback();
    }

    /**
     * Exercises check-constraint enforcement (SQLState 23513) for inserts
     * and updates: multiple constraints on one table, statement-level
     * rollback on violation, conflicting constraints, self-referencing
     * insert/update (same source and target table), UNION under INSERT,
     * and result-set normalization of decimal literals into int columns.
     */
    public void testCheckConstraints() throws SQLException{
        Statement st = createStatement();
        setAutoCommit(false);

        // positive: multiple check constraints on same table
        st.executeUpdate(
            "create table pos1(c1 int check(c1 > 0), constraint "
            + "asdf check(c1 < 10))");

        // verify both constraints are enforced
        assertStatementError("23513", st,
            "insert into pos1 values 0");

        st.executeUpdate(
            " insert into pos1 values 1");

        st.executeUpdate(
            " insert into pos1 values 9");

        assertStatementError("23513", st,
            " insert into pos1 values 10");

        ResultSet rs = st.executeQuery("select * from pos1");
        String[] expColNames = {"C1"};
        JDBC.assertColumnNames(rs, expColNames);
        String[][] expRS = { {"1"}, {"9"} };
        JDBC.assertFullResultSet(rs, expRS, true);

        // verify constraint violation rolls back entire statement
        assertStatementError("23513", st,
            "update pos1 set c1 = c1 + 1");

        rs = st.executeQuery(
            " select * from pos1");
        expColNames = new String [] {"C1"};
        JDBC.assertColumnNames(rs, expColNames);
        expRS = new String [][] { {"1"}, {"9"} };
        JDBC.assertFullResultSet(rs, expRS, true);

        assertStatementError("23513", st,
            " update pos1 set c1 = c1 - 1");

        rs = st.executeQuery(
            " select * from pos1");
        expColNames = new String [] {"C1"};
        JDBC.assertColumnNames(rs, expColNames);
        expRS = new String [][] { {"1"}, {"9"} };
        JDBC.assertFullResultSet(rs, expRS, true);

        rollback();

        // conflicting constraints, should fail
        st.executeUpdate(
            "create table negcks(c1 int constraint ck1st "
            + "check(c1 > 4), c2 int constraint ck2nd check(c2 > "
            + "2), c3 int, constraint ckLast check(c2 > c1))");

        // constraint ck1st fails
        assertStatementError("23513", st,
            "insert into negcks values (1, 3, 3)");

        // constraint ckLast fails (ck2nd fails too)
        assertStatementError("23513", st,
            "insert into negcks values (5, 1, 3)");

        // constraint ck1st fails (ckLast fails too)
        assertStatementError("23513", st,
            "insert into negcks values (2, 3, 3)");

        rollback();

        // same source and target tables
        st.executeUpdate(
            "create table pos1(c1 int, c2 int, constraint ck1 "
            + "check (c1 < c2))");

        st.executeUpdate(
            " insert into pos1 values (1, 2), (2, 3), (3, 4)");

        commit();

        // these should work
        st.executeUpdate(
            "insert into pos1 select * from pos1");

        rs = st.executeQuery(
            " select count(*) from pos1");
        expColNames = new String [] {"1"};
        JDBC.assertColumnNames(rs, expColNames);
        expRS = new String [][] { {"6"} };
        JDBC.assertFullResultSet(rs, expRS, true);

        assertUpdateCount(st, 6,
            " update pos1 set c2 = (select max(c1) from pos1), "
            + "c1 = (select min(c2) from pos1)");

        rs = st.executeQuery(
            " select * from pos1");
        expColNames = new String [] {"C1", "C2"};
        JDBC.assertColumnNames(rs, expColNames);
        expRS = new String [][] {
            {"2", "3"},
            {"2", "3"},
            {"2", "3"},
            {"2", "3"},
            {"2", "3"},
            {"2", "3"}
        };
        JDBC.assertFullResultSet(rs, expRS, true);

        rollback();

        // these should fail
        assertStatementError("23513", st,
            "insert into pos1 select c2, c1 from pos1");

        rs = st.executeQuery(
            " select count(*) from pos1");
        expColNames = new String [] {"1"};
        JDBC.assertColumnNames(rs, expColNames);
        expRS = new String [][] { {"3"} };
        JDBC.assertFullResultSet(rs, expRS, true);

        assertStatementError("23513", st,
            " update pos1 set c2 = (select min(c1) from pos1), "
            + "c1 = (select max(c2) from pos1)");

        rs = st.executeQuery(
            " select * from pos1");
        expColNames = new String [] {"C1", "C2"};
        JDBC.assertColumnNames(rs, expColNames);
        expRS = new String [][] {
            {"1", "2"},
            {"2", "3"},
            {"3", "4"}
        };
        JDBC.assertFullResultSet(rs, expRS, true);

        st.executeUpdate(
            " drop table pos1");

        commit();

        // union under insert
        st.executeUpdate(
            "create table t1(c1 int, c2 int, constraint ck1 "
            + "check(c1 = c2))");

        assertStatementError("23513", st,
            " insert into t1 values (1, 1), (2, 1)");

        rs = st.executeQuery(
            " select * from t1");
        expColNames = new String [] {"C1", "C2"};
        JDBC.assertColumnNames(rs, expColNames);
        JDBC.assertDrainResults(rs, 0);

        // normalize result set under insert/update
        st.executeUpdate(
            "insert into t1 values (1.0, 1)");

        assertStatementError("23513", st,
            " insert into t1 values (2.0, 1)");

        rs = st.executeQuery(
            " select * from t1");
        expColNames = new String [] {"C1", "C2"};
        JDBC.assertColumnNames(rs, expColNames);
        expRS = new String [][] { {"1", "1"} };
        JDBC.assertFullResultSet(rs, expRS, true);

        assertUpdateCount(st, 1,
            " update t1 set c2 = 1.0");

        assertStatementError("23513", st,
            " update t1 set c2 = 2.0");

        rs = st.executeQuery(
            " select * from t1");
        expColNames = new String [] {"C1", "C2"};
        JDBC.assertColumnNames(rs, expColNames);
        expRS = new String [][] { {"1", "1"} };
        JDBC.assertFullResultSet(rs, expRS, true);

        assertUpdateCount(st, 1,
            " update t1 set c1 = 3.0, c2 = 3.0");

        rs = st.executeQuery(
            " select * from t1");
        expColNames = new String [] {"C1", "C2"};
        JDBC.assertColumnNames(rs, expColNames);
        expRS = new String [][] { {"3", "3"} };
        JDBC.assertFullResultSet(rs, expRS, true);

        rollback();
    }

    /**
     * Verifies that check constraints are enforced for positioned
     * (WHERE CURRENT OF) updates through named cursors, including when
     * two columns are swapped in one statement, and that constraints
     * with complex arithmetic expressions behave correctly.
     */
    public void testPositionalUpdate() throws SQLException{
        Statement st = createStatement();
        setAutoCommit(false);

        // positioned update
        st.executeUpdate(
            "create table t1(c1 int, c2 int, constraint ck1 "
            + "check(c1 = c2), constraint ck2 check(c2=c1))");

        st.executeUpdate(
            " insert into t1 values (1, 1), (2, 2), (3, 3), (4, 4)");

        st.executeUpdate(
            " create index i1 on t1(c1)");

        Statement st1 = createStatement();
        st1.setCursorName("c1");
        ResultSet rs1 = st1.executeQuery(
            "select * from t1 where c2=2 for update of C1");
        rs1.next();
        setAutoCommit(false);

        // this update should succeed (value unchanged)
        assertUpdateCount(st,1,
            "update t1 set c1 = c1 where current of \"c1\"");

        // this update should fail (breaks c1 = c2)
        assertStatementError("23513", st,
            "update t1 set c1 = c1 + 1 where current of \"c1\"");

        st1.close();
        rs1.close();

        Statement st2 = createStatement();
        st2.setCursorName("c2");
        ResultSet rs2 = st2.executeQuery(
            "select * from t1 where c1 = 2 for update of c2");
        rs2.next();
        setAutoCommit(false);

        // this update should succeed
        assertUpdateCount(st,1,
            "update t1 set c2 = c2 where current of \"c2\"");

        // this update should fail
        assertStatementError("23513", st,
            "update t1 set c2 = c2 + 1 where current of \"c2\"");

        st2.close();
        rs2.close();

        Statement st3 = createStatement();
        st3.setCursorName("c3");
        ResultSet rs3 = st3.executeQuery(
            "select * from t1 where c1 = 2 for update of c1, c2");
        rs3.next();
        setAutoCommit(false);

        // this update should succeed (swap of equal values)
        assertUpdateCount(st, 1,
            "update t1 set c2 = c1, c1 = c2 where current of \"c3\"");

        // this update should fail (c1 and c2 diverge)
        assertStatementError("23513", st,
            "update t1 set c2 = c2 + 1, c1 = c1 + 3 where current of \"c3\"");

        // this update should succeed (both raised by the same amount)
        assertUpdateCount(st, 1,
            "update t1 set c2 = c1 + 3, c1 = c2 + 3 where current of \"c3\"");

        st3.close();
        rs3.close();

        ResultSet rs = st.executeQuery("select * from t1");
        String[] expColNames = {"C1", "C2"};
        JDBC.assertColumnNames(rs, expColNames);
        String[][] expRS = {
            {"1", "1"},
            {"5", "5"},
            {"3", "3"},
            {"4", "4"}
        };
        JDBC.assertFullResultSet(rs, expRS, true);

        rollback();

        // complex expressions
        st.executeUpdate(
            "create table t1(c1 int check((c1 + c1) = (c1 * c1) "
            + "or (c1 + c1)/2 = (c1 * c1)), c2 int)");

        // this insert should succeed
        st.executeUpdate(
            "insert into t1 values (1, 9), (2, 10)");

        // these updates should succeed
        assertUpdateCount(st, 2,
            "update t1 set c2 = c2 * c2");

        assertUpdateCount(st, 1,
            " update t1 set c1 = 2 where c1 = 1");

        assertUpdateCount(st, 2,
            " update t1 set c1 = 1 where c1 = 2");

        // this update should fail
        assertStatementError("23513", st,
            "update t1 set c1 = c2");

        rs = st.executeQuery(
            " select * from t1");
        expColNames = new String [] {"C1", "C2"};
        JDBC.assertColumnNames(rs, expColNames);
        expRS = new String [][] {
            {"1", "81"},
            {"1", "100"}
        };
        JDBC.assertFullResultSet(rs, expRS, true);

        rollback();
    }

    /**
     * Verifies built-in functions (CHAR, INT) inside check constraints,
     * that prepared statements stay dependent on constraints across
     * constraint drop/rollback, check constraints combined with dynamic
     * parameters, many generated constraint names on one table (bug 5622),
     * and the constraint TYPE codes recorded in SYS.SYSCONSTRAINTS.
     */
    public void testBuiltInFunctions() throws SQLException{
        Statement st = createStatement();
        setAutoCommit(false);

        // built-in functions in a check constraint
        st.executeUpdate(
            "create table charTab (c1 char(4) check(CHAR(c1) = c1))");

        st.executeUpdate(
            " insert into charTab values 'asdf'");

        st.executeUpdate(
            " insert into charTab values 'fdsa'");

        // beetle 5805 - built-in function INT in a check constraint;
        // this succeeds now that beetle 5805 is implemented (it used
        // to fail before that)
        st.executeUpdate(
            "create table intTab (c1 int check(INT(1) = c1))");

        st.executeUpdate(
            " insert into intTab values 1");

        // this insert should fail, does not satisfy check constraint
        assertStatementError("23513", st,
            "insert into intTab values 2");

        st.executeUpdate(
            " create table maxIntTab (c1 int check(INT(2147483647) > c1))");

        st.executeUpdate(
            " insert into maxIntTab values 1");

        // this insert should fail, does not satisfy check constraint
        assertStatementError("23513", st,
            "insert into maxIntTab values 2147483647");

        rollback();

        // verify that inserts, updates and statements with forced
        // constraints are indeed dependent on the constraints
        st.executeUpdate(
            "create table t1(c1 int not null constraint asdf primary key)");

        st.executeUpdate(
            " insert into t1 values 1, 2, 3, 4, 5");

        commit();

        PreparedStatement pSt1 = prepareStatement(
            "insert into t1 values 1");

        PreparedStatement pSt2 = prepareStatement(
            "update t1 set c1 = 3 where c1 = 4");

        PreparedStatement pSt3 = prepareStatement(
            "select * from t1");

        // the insert and update should fail, select should succeed
        assertStatementError("23505", pSt1);
        assertStatementError("23505", pSt2);

        ResultSet rs = pSt3.executeQuery();
        String[] expColNames = {"C1"};
        JDBC.assertColumnNames(rs, expColNames);
        String[][] expRS = { {"1"}, {"2"}, {"3"}, {"4"}, {"5"} };
        JDBC.assertFullResultSet(rs, expRS, true);

        st.executeUpdate(
            " alter table t1 drop constraint asdf");

        // rollback and verify that constraints are enforced and
        // select succeeds
        rollback();

        assertStatementError("23505", pSt1);
        assertStatementError("23505", pSt2);

        rs = pSt3.executeQuery();
        expColNames = new String [] {"C1"};
        JDBC.assertColumnNames(rs, expColNames);
        expRS = new String [][] { {"1"}, {"2"}, {"3"}, {"4"}, {"5"} };
        JDBC.assertFullResultSet(rs, expRS, true);

        st.executeUpdate(
            " drop table t1");

        // check constraints with parameters
        st.executeUpdate(
            "create table t1(c1 int constraint asdf check(c1 = 1))");

        PreparedStatement pSt = prepareStatement(
            "insert into t1 values (?)");

        rs = st.executeQuery(
            "values (1)");
        rs.next();
        ResultSetMetaData rsmd = rs.getMetaData();
        // bind each column of the "values" row to the insert's parameters
        for (int i = 1; i <= rsmd.getColumnCount(); i++)
            pSt.setObject(i, rs.getObject(i));
        assertUpdateCount(pSt, 1);

        // clean up
        st.executeUpdate(
            "drop table t1");

        // many system-generated constraint names on a single table
        st.executeUpdate(
            " create table t1(active_flag char(2) "
            + "check(active_flag IN ('Y', 'N')), "
            + "araccount_active_flag char(2) "
            + "check(araccount_active_flag IN ('Y', 'N')), "
            + "automatic_refill_flag char(2) "
            + "check(automatic_refill_flag IN ('Y', 'N')), "
            + "call_when_ready_flag char(2) "
            + "check(call_when_ready_flag IN ('Y', 'N')), "
            + "compliance_flag char(2) check(compliance_flag IN "
            + "('Y', 'N')), delivery_flag char(2) "
            + "check(delivery_flag IN ('Y', 'N')), "
            + "double_count_flag char(2) check(double_count_flag "
            + "IN ('Y', 'N')), gender_ind char(2) check(gender_ind "
            + "IN ('M', 'F', 'U')), geriatric_flag char(2) "
            + "check(geriatric_flag IN ('Y', 'N')), "
            + "refuse_inquiry_flag char(2) "
            + "check(refuse_inquiry_flag IN ('Y', 'N')), "
            + "animal_flag char(2) check(animal_flag IN ('Y', "
            + "'N')), terminal_flag char(2) check(terminal_flag IN "
            + "('Y', 'N')), unit_flag char(2) check(unit_flag IN "
            + "('Y', 'N')), VIP_flag char(2) check(VIP_flag IN "
            + "('Y', 'N')), snap_cap_flag char(2) "
            + "check(snap_cap_flag IN ('Y', 'N')), "
            + "consent_on_file_flag char(2) "
            + "check(consent_on_file_flag IN ('Y', 'N')), "
            + "enlarged_SIG_flag char(2) check(enlarged_SIG_flag "
            + "IN ('Y', 'N')),aquired_patient_flag char(2) "
            + "check(aquired_patient_flag IN ('Y', 'N')))");

        // bug 5622 - internal generated constraint names are
        // re-worked to match db2's naming convention.
        st.executeUpdate(
            "drop table t1");

        st.executeUpdate(
            " create table t1 (c1 int not null primary key, c2 "
            + "int not null unique, c3 int check (c3>=0))");

        st.executeUpdate(
            " alter table t1 add column c4 int not null default 1");

        st.executeUpdate(
            " alter table t1 add constraint c4_unique UNIQUE(c4)");

        st.executeUpdate(
            " alter table t1 add column c5 int check(c5 >= 0)");

        // constraint TYPE codes: P=primary key, U=unique, C=check
        rs = st.executeQuery(
            " select c.type from "
            + "sys.sysconstraints c, sys.systables t where "
            + "c.tableid = t.tableid and tablename='T1'");
        expColNames = new String [] {"TYPE"};
        JDBC.assertColumnNames(rs, expColNames);
        expRS = new String [][] {
            {"P"}, {"U"}, {"C"}, {"U"}, {"C"}
        };
        JDBC.assertUnorderedResultSet(rs, expRS, true);

        assertStatementError("42Y55", st,
            " drop table t2");

        st.executeUpdate(
            " create table t2 (c21 int references t1)");

        // F = foreign key
        rs = st.executeQuery(
            " select c.type from "
            + "sys.sysconstraints c, sys.systables t where "
            + "c.tableid = t.tableid and tablename='T2'");
        expColNames = new String [] {"TYPE"};
        JDBC.assertColumnNames(rs, expColNames);
        expRS = new String [][] { {"F"} };
        JDBC.assertFullResultSet(rs, expRS, true);

        assertStatementError("42Y55", st,
            " drop table t3");

        st.executeUpdate(
            " create table t3 (c1 int check (c1 >= 0), c2 int "
            + "check (c2 >= 0), c3 int check (c3 >= 0), c4 int "
            + "check (c4 >= 0), c5 int check (c5 >= 0), c6 int "
            + "check (c6 >= 0), c7 int check (c7 >= 0), c8 int "
            + "check (c8 >= 0), c9 int check (c9 >= 0), c10 int "
            + "check (c10 >= 0), c11 int check (c11 >= 0), c12 int "
            + "check (c12 >= 0), c13 int check (c13 >= 0))");

        rs = st.executeQuery(
            " select c.type from "
            + "sys.sysconstraints c, sys.systables t where "
            + "c.tableid = t.tableid and tablename='T3'");
        expColNames = new String [] {"TYPE"};
        JDBC.assertColumnNames(rs, expColNames);
        expRS = new String [][] {
            {"C"}, {"C"}, {"C"}, {"C"}, {"C"}, {"C"}, {"C"},
            {"C"}, {"C"}, {"C"}, {"C"}, {"C"}, {"C"}
        };
        JDBC.assertFullResultSet(rs, expRS, true);

        assertStatementError("42Y55", st,
            " drop table t4");

        st.executeUpdate(
            " create table t4(c11 int not null, c12 int not "
            + "null, primary key (c11, c12))");

        rs = st.executeQuery(
            " select c.type from "
            + "sys.sysconstraints c, sys.systables t where "
            + "c.tableid = t.tableid and tablename='T4'");
        expColNames = new String [] {"TYPE"};
        JDBC.assertColumnNames(rs, expColNames);
        expRS = new String [][] { {"P"} };
        JDBC.assertFullResultSet(rs, expRS, true);

        // Cleanup:
        st.executeUpdate("drop table t4");
        st.executeUpdate("drop table t3");
        st.executeUpdate("drop table t2");
        st.executeUpdate("drop table t1");
        commit();
    }

    /**
     * DERBY-2989: add/drop of a check constraint on a table whose name
     * must be delimited (lower-case "indicator") works end to end.
     */
    public void testJira2989() throws SQLException{
        Statement st = createStatement();
        setAutoCommit(false);
        st.executeUpdate(
            "CREATE TABLE \"indicator\" (c CHAR(1) DEFAULT 'N')");

        st.executeUpdate(
            " ALTER TABLE \"indicator\" ADD CONSTRAINT "
            + "my_constraint CHECK ((c IN ('Y','N')))");

        st.executeUpdate(
            " INSERT INTO \"indicator\" VALUES ('N')");

        st.executeUpdate(
            " ALTER TABLE \"indicator\" DROP CONSTRAINT my_constraint");

        st.executeUpdate(
            " DROP TABLE \"indicator\"");

        getConnection().rollback();
        st.close();
    }

    /**
     * DERBY-4282: a positioned update through a FOR UPDATE cursor on a
     * table with check constraints must not fail column resolution.
     */
    public void testJira4282() throws SQLException {

        // This test doesn't work properly in the embedded configuration.
        // The intent of the test is to expose the DERBY-4282 problem, and
        // this test case does do that in the client/server configuration, so
        // we only run the test in that configuration. In the embedded
        // configuration, the UPDATE statement unexpectedly gets a
        // "no current row" exception.
        //
        // if (usingEmbedded()) return;

        Statement st = createStatement();

        st.executeUpdate(
            "create table t4282(c1 int, c2 int, constraint ck1 "
            + "check(c1 = c2), constraint ck2 check(c2=c1))");
        st.executeUpdate("insert into t4282 values (1,1),(2,2),(3,3),(4,4)");

        Statement st1 = createStatement();
        st1.setCursorName("c1");
        ResultSet rs = st1.executeQuery("select * from t4282 for update");
        assertTrue("Failed to retrieve row for update", rs.next());

        // DERBY-4282 causes the next statement to fail with:
        //
        // Column 'C2' is either not in any table in the FROM list or
        // appears within a join specification and is outside the scope
        // of the join specification or appears in a HAVING clause and
        // is not in the GROUP BY list. If this is a CREATE or ALTER TABLE
        // statement then 'C2' is not a column in the target table.
        st.executeUpdate("update t4282 set c1 = c1 where current of \"c1\"");

        // If we get here, all is well, and DERBY-4282 did not occur.
        st1.close();
        st.close();

        dropTable("t4282");
    }

    // This test verifies that if the PRIMARY KEY constraint mentions a
    // column which is potentially large, then Derby will automatically
    // choose a large pagesize for the index's conglomerate (DERBY-3947)
    //
    public void testPrimaryKeyPageSizeDerby3947() throws SQLException {
        Statement st = createStatement();
        st.executeUpdate("create table d3947 (x varchar(1000) primary key)");

        // insert one near-max-width row so the index sees a wide key
        char[] chars = new char[994];
        PreparedStatement ps = prepareStatement("insert into d3947 values (?)");
        ps.setString(1, new String(chars));
        ps.executeUpdate();
        ps.close();
        checkLargePageSize(st, "D3947");
        st.executeUpdate("drop table d3947");

        // A second variation is to add the PK constraint using ALTER TABLE;
        // A third variation is to add a FK constraint
        st.executeUpdate("create table d3947 (x varchar(1000) not null, "
            + " y varchar(1000))");
        st.executeUpdate("alter table d3947 add constraint "
            + "constraint1 primary key (x)");
        st.executeUpdate("alter table d3947 add constraint "
            + "constraint2 foreign key (y) references d3947(x)");
        checkLargePageSize(st, "D3947");

        // Ensure we still get the right error message when col doesn't exist:
        assertStatementError("42X14", st,
            "alter table d3947 add constraint "
            + "constraint3 foreign key (z) references d3947(x)");

        st.executeUpdate("drop table d3947");
        st.close();
    }

    /**
     * Asserts, via SYSCS_DIAG.SPACE_TABLE, that every index conglomerate
     * of {@code tblName} uses the 32K page size while the base table
     * itself stays at the default 4K page size.
     */
    private void checkLargePageSize(Statement st, String tblName)
        throws SQLException {
        ResultSet rs = st.executeQuery(
            "select * from TABLE(SYSCS_DIAG.SPACE_TABLE('"+tblName+"')) T");
        while (rs.next()) {
            if ("1".equals(rs.getString("isindex")))
                assertEquals(32768, rs.getInt("pagesize"));
            else
                assertEquals(4096, rs.getInt("pagesize"));
            //System.out.println(rs.getString("conglomeratename") +
            //        ","+rs.getString("isindex")+
            //        ","+rs.getString("pagesize"));
        }
        rs.close();
    }

    /**
     * Test that CHECK constraint works if it contains unqualified names and
     * the current schema when the constraint is defined is different from the
     * schema in which the table lives. Regression test case for DERBY-6362.
     */
    public void testDerby6362() throws SQLException {
        setAutoCommit(false);
        Statement s = createStatement();
        s.execute("create schema d6362_s1");
        s.execute("create schema d6362_s2");
        s.execute("set schema d6362_s1");
        s.execute("create function f(x int) returns int deterministic "
                + "language java parameter style java external name "
                + "'java.lang.Math.abs' no sql");
        s.execute("create type typ "
                + "external name 'java.util.ArrayList' language java");

        // Create the table with the constraints in a different schema than
        // the current schema. Before DERBY-6362, unqualified names would be
        // resolved to the current schema at definition time and to the
        // table's schema during execution, which made them behave unreliably
        // if the schemas differed.
        s.execute("create table d6362_s2.t(x int, "
                + "constraint c001 check(f(x) < 3))");
        s.execute("alter table d6362_s2.t "
                + "add constraint c002 check(f(x) >= 0)");
        s.execute("alter table d6362_s2.t "
                + "add constraint c003 check(cast(null as typ) is null)");

        // Use a function that lives in the SYSFUN schema.
        s.execute("alter table d6362_s2.t add constraint c004 "
                + "check(f(x) > cos(pi()))");

        // ABS is an operator, not a function, so it will not be qualified.
        s.execute("alter table d6362_s2.t add constraint c005 "
                + "check(abs(f(x)) < pi())");

        // Add some constraints that reference the table. See that table
        // names are qualified. Unqualified column names will not be qualified
        // with schema and table.
        s.execute("set schema d6362_s2");
        s.execute("alter table t add constraint c101 check(x < 3)");
        s.execute("alter table t add constraint c102 check(t.x < 4)");
        s.execute("alter table t add constraint c103 "
                + "check(x <= d6362_s1.f(t.x))");

        // Add some fully qualified names to see that they still work.
        s.execute("alter table t add constraint c201 check(d6362_s2.t.x < 5)");
        s.execute("alter table t add constraint c202 check(d6362_s1.f(x) < 5)");
        s.execute("alter table t add constraint c203 "
                + "check(cast(null as d6362_s1.typ) is null)");

        // Verify that the constraints were stored with fully qualified names.
        String[][] expectedConstraints = {
            {"C001", "(\"D6362_S1\".\"F\"(x) < 3)"},
            {"C002", "(\"D6362_S1\".\"F\"(x) >= 0)"},
            {"C003", "(cast(null as \"D6362_S1\".\"TYP\") is null)"},
            {"C004",
             "(\"D6362_S1\".\"F\"(x) > \"SYSFUN\".\"COS\"(\"SYSFUN\".\"PI\"()))"},
            {"C005", "(abs(\"D6362_S1\".\"F\"(x)) < \"SYSFUN\".\"PI\"())"},
            {"C101", "(x < 3)"},
            {"C102", "(\"D6362_S2\".\"T\".x < 4)"},
            {"C103", "(x <= \"D6362_S1\".\"F\"(\"D6362_S2\".\"T\".x))"},
            {"C201", "(\"D6362_S2\".\"T\".x < 5)"},
            {"C202", "(\"D6362_S1\".\"F\"(x) < 5)"},
            {"C203", "(cast(null as \"D6362_S1\".\"TYP\") is null)"},
        };

        JDBC.assertFullResultSet(
            s.executeQuery(
                "select constraintname, checkdefinition from sys.syschecks "
                + "natural join sys.sysconstraints natural join sys.sysschemas "
                + "where schemaname = 'D6362_S2' and type = 'C' "
                + "order by constraintname"),
            expectedConstraints);

        // Verify that constraints can be executed. Used to fail because
        // unqualified functions and types were resolved to the table's schema
        // instead of the current schema at the time the constraint was defined.
        s.execute("insert into t values 1,2");
        assertStatementError("23513", s, "insert into t values -10");
        assertStatementError("23513", s, "insert into t values 10");
    }

    /**
     * Creates table {@code t_bi_1} whose B column is limited by a check
     * constraint to values below {@code limit}, plus the SQL function
     * FUNC that maps to {@link #func()} below.
     */
    private void setupForBulkInsert(Statement s, int limit)
            throws SQLException {
        s.executeUpdate(
            "create table t_bi_1( a int, b int check (b < " + limit + "))");
        s.executeUpdate(
            "create function func () returns int "
            + " language java parameter style java deterministic no sql "
            + " external name '" + this.getClass().getName() + ".func'");
    }

    /**
     * DERBY-6453. Exercise hitherto untested code path in
     * InsertResultSet (call to evaluateCheckConstraints from
     * preprocessSourceRow used by bulkInsert) and verify IMPORT with
     * trigger (which would a priori mandate bulk insert, but is
     * changed due to normal inserts due to the presence of a
     * trigger).
     */
    public void testbulkInsert() throws SQLException {
        setAutoCommit(false);
        Statement s = createStatement();
        // limit 0: every imported row violates the check constraint
        setupForBulkInsert(s, 0);

        assertStatementError(
            "23513",
            s,
            "call syscs_util.syscs_import_data( "
            + "    null, "
            + "    'T_BI_1', "
            + "    'A, B', "
            + "    '1, 2', "
            + "    'extin/" + IMPORT_FILE_NAME + "', "
            + "    null, null, null, 0 )");

        // NOTE(review): setupForBulkInsert is invoked a second time without
        // dropping t_bi_1/FUNC in between -- presumably the failed import
        // rolled the earlier DDL back; confirm against the import procedure's
        // transaction behavior.
        setupForBulkInsert(s, 10);

        s.executeUpdate(
            "create trigger dagstrigger no cascade before insert on t_bi_1 "
            + " values func() ");

        funcWasCalled = false;

        s.executeUpdate(
            "call syscs_util.syscs_import_data( "
            + "    null, "
            + "    'T_BI_1', "
            + "    'A, B', "
            + "    '1, 2', "
            + "    'extin/" + IMPORT_FILE_NAME + "', "
            + "    null, null, null, 0 )");

        // the trigger must have fired, proving bulk insert was downgraded
        // to normal inserts in the presence of a trigger
        assertTrue(funcWasCalled);
    }

    // set by func() so the test can observe that the trigger fired
    static boolean funcWasCalled;

    /** Backing Java method for the SQL function FUNC; records the call. */
    public static int func() {
        funcWasCalled = true;
        return 0;
    }
}
apache/flink-cdc
37,281
flink-cdc-connect/flink-cdc-source-connectors/flink-connector-oracle-cdc/src/test/java/org/apache/flink/cdc/connectors/oracle/OracleSourceTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.cdc.connectors.oracle; import org.apache.flink.api.common.state.BroadcastState; import org.apache.flink.api.common.state.KeyedStateStore; import org.apache.flink.api.common.state.ListState; import org.apache.flink.api.common.state.ListStateDescriptor; import org.apache.flink.api.common.state.MapStateDescriptor; import org.apache.flink.api.common.state.OperatorStateStore; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.cdc.connectors.oracle.source.OracleSourceTestBase; import org.apache.flink.cdc.connectors.utils.TestSourceContext; import org.apache.flink.cdc.debezium.DebeziumDeserializationSchema; import org.apache.flink.cdc.debezium.DebeziumSourceFunction; import org.apache.flink.configuration.Configuration; import org.apache.flink.core.testutils.CheckedThread; import org.apache.flink.runtime.state.FunctionInitializationContext; import org.apache.flink.runtime.state.StateSnapshotContextSynchronousImpl; import org.apache.flink.streaming.runtime.streamrecord.StreamRecord; import org.apache.flink.streaming.util.MockStreamingRuntimeContext; import org.apache.flink.util.Collector; import org.apache.flink.util.Preconditions; import 
com.jayway.jsonpath.JsonPath; import org.apache.kafka.connect.source.SourceRecord; import org.assertj.core.api.Assertions; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.nio.charset.StandardCharsets; import java.sql.Connection; import java.sql.Statement; import java.time.Duration; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.OptionalLong; import java.util.Properties; import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import static org.apache.flink.cdc.connectors.utils.AssertUtils.assertDelete; import static org.apache.flink.cdc.connectors.utils.AssertUtils.assertInsert; import static org.apache.flink.cdc.connectors.utils.AssertUtils.assertRead; import static org.apache.flink.cdc.connectors.utils.AssertUtils.assertUpdate; /** Tests for {@link OracleSource} which also heavily tests {@link DebeziumSourceFunction}. 
*/ class OracleSourceTest extends OracleSourceTestBase { private static final Logger LOG = LoggerFactory.getLogger(OracleSourceTest.class); @Test void testConsumingAllEvents() throws Exception { createAndInitialize("product.sql"); DebeziumSourceFunction<SourceRecord> source = createOracleLogminerSource(); TestSourceContext<SourceRecord> sourceContext = new TestSourceContext<>(); setupSource(source); try (Connection connection = getJdbcConnection(); Statement statement = connection.createStatement()) { // start the source final CheckedThread runThread = new CheckedThread() { @Override public void go() throws Exception { source.run(sourceContext); } }; runThread.start(); List<SourceRecord> records = drain(sourceContext, 9); Assertions.assertThat(records).hasSize(9); for (int i = 0; i < records.size(); i++) { assertRead(records.get(i), "ID", 101 + i); } statement.execute( "INSERT INTO debezium.products VALUES (110,'robot','Toy robot',1.304)"); // 110 records = drain(sourceContext, 1); assertInsert(records.get(0), "ID", 110); statement.execute( "INSERT INTO debezium.products VALUES (1001,'roy','old robot',1234.56)"); // 1001 records = drain(sourceContext, 1); assertInsert(records.get(0), "ID", 1001); // --------------------------------------------------------------------------------------------------------------- // Changing the primary key of a row should result in 2 events: INSERT, DELETE // (TOMBSTONE is dropped) // --------------------------------------------------------------------------------------------------------------- statement.execute( "UPDATE debezium.products SET id=2001, description='really old robot' WHERE id=1001"); records = drain(sourceContext, 2); assertDelete(records.get(0), "ID", 1001); assertInsert(records.get(1), "ID", 2001); // --------------------------------------------------------------------------------------------------------------- // Simple UPDATE (with no schema changes) // 
--------------------------------------------------------------------------------------------------------------- statement.execute("UPDATE debezium.products SET weight=1345.67 WHERE id=2001"); records = drain(sourceContext, 1); assertUpdate(records.get(0), "ID", 2001); // --------------------------------------------------------------------------------------------------------------- // Change our schema with a fully-qualified name; we should still see this event // --------------------------------------------------------------------------------------------------------------- // Add a column with default to the 'products' table and explicitly update one record // ... statement.execute( String.format("ALTER TABLE %s.products ADD volume FLOAT", "debezium")); statement.execute("UPDATE debezium.products SET volume=13.5 WHERE id=2001"); records = drain(sourceContext, 1); assertUpdate(records.get(0), "ID", 2001); // cleanup source.close(); runThread.sync(); } } @Test @Disabled("It can be open until DBZ-5245 and DBZ-4936 fix") void testCheckpointAndRestore() throws Exception { createAndInitialize("product.sql"); final TestingListState<byte[]> offsetState = new TestingListState<>(); final TestingListState<String> historyState = new TestingListState<>(); { // --------------------------------------------------------------------------- // Step-1: start the source from empty state // --------------------------------------------------------------------------- final DebeziumSourceFunction<SourceRecord> source = createOracleLogminerSource(); // we use blocking context to block the source to emit before last snapshot record final BlockingSourceContext<SourceRecord> sourceContext = new BlockingSourceContext<>(8); // setup source with empty state setupSource(source, false, offsetState, historyState, true, 0, 1); final CheckedThread runThread = new CheckedThread() { @Override public void go() throws Exception { source.run(sourceContext); } }; runThread.start(); // wait until consumer is 
started int received = drain(sourceContext, 2).size(); Assertions.assertThat(received).isEqualTo(2); // we can't perform checkpoint during DB snapshot Assertions.assertThat( waitForCheckpointLock( sourceContext.getCheckpointLock(), Duration.ofSeconds(3))) .isFalse(); // unblock the source context to continue the processing sourceContext.blocker.release(); // wait until the source finishes the database snapshot List<SourceRecord> records = drain(sourceContext, 9 - received); Assertions.assertThat(records.size() + received).isEqualTo(9); // state is still empty Assertions.assertThat(offsetState.list).isEmpty(); Assertions.assertThat(historyState.list).isEmpty(); // --------------------------------------------------------------------------- // Step-2: trigger checkpoint-1 after snapshot finished // --------------------------------------------------------------------------- synchronized (sourceContext.getCheckpointLock()) { // trigger checkpoint-1 source.snapshotState(new StateSnapshotContextSynchronousImpl(101, 101)); } assertHistoryState(historyState); Assertions.assertThat(offsetState.list).hasSize(1); String state = new String(offsetState.list.get(0), StandardCharsets.UTF_8); Assertions.assertThat(JsonPath.<String>read(state, "$.sourcePartition.server")) .isEqualTo("oracle_logminer"); Assertions.assertThat(state) .doesNotContain("row") .doesNotContain("server_id") .doesNotContain("event"); source.close(); runThread.sync(); } { // --------------------------------------------------------------------------- // Step-3: restore the source from state // --------------------------------------------------------------------------- final DebeziumSourceFunction<SourceRecord> source2 = createOracleLogminerSource(); final TestSourceContext<SourceRecord> sourceContext2 = new TestSourceContext<>(); setupSource(source2, true, offsetState, historyState, true, 0, 1); final CheckedThread runThread2 = new CheckedThread() { @Override public void go() throws Exception { 
source2.run(sourceContext2); } }; runThread2.start(); // make sure there is no more events Assertions.assertThat(waitForAvailableRecords(Duration.ofSeconds(5), sourceContext2)) .isFalse(); try (Connection connection = getJdbcConnection(); Statement statement = connection.createStatement()) { statement.execute( "INSERT INTO debezium.products VALUES (110,'robot','Toy robot',1.304)"); // 110 List<SourceRecord> records = drain(sourceContext2, 1); Assertions.assertThat(records).hasSize(1); assertInsert(records.get(0), "ID", 110); // --------------------------------------------------------------------------- // Step-4: trigger checkpoint-2 during DML operations // --------------------------------------------------------------------------- synchronized (sourceContext2.getCheckpointLock()) { // trigger checkpoint-1 source2.snapshotState(new StateSnapshotContextSynchronousImpl(138, 138)); } assertHistoryState(historyState); // assert the DDL is stored in the history state Assertions.assertThat(offsetState.list).hasSize(1); String state = new String(offsetState.list.get(0), StandardCharsets.UTF_8); Assertions.assertThat(JsonPath.<String>read(state, "$.sourcePartition.server")) .isEqualTo("oracle_logminer"); // execute 2 more DMLs to have more redo log statement.execute( "INSERT INTO debezium.products VALUES (1001,'roy','old robot',1234.56)"); // 1001 statement.execute("UPDATE debezium.products SET weight=1345.67 WHERE id=1001"); } // cancel the source source2.close(); runThread2.sync(); } { // --------------------------------------------------------------------------- // Step-5: restore the source from checkpoint-2 // --------------------------------------------------------------------------- final DebeziumSourceFunction<SourceRecord> source3 = createOracleLogminerSource(); final TestSourceContext<SourceRecord> sourceContext3 = new TestSourceContext<>(); setupSource(source3, true, offsetState, historyState, true, 0, 1); // restart the source final CheckedThread runThread3 = 
new CheckedThread() { @Override public void go() throws Exception { source3.run(sourceContext3); } }; runThread3.start(); // consume the unconsumed redo log List<SourceRecord> records = drain(sourceContext3, 2); assertInsert(records.get(0), "ID", 1001); assertUpdate(records.get(1), "ID", 1001); // make sure there is no more events Assertions.assertThat(waitForAvailableRecords(Duration.ofSeconds(3), sourceContext3)) .isFalse(); // can continue to receive new events try (Connection connection = getJdbcConnection(); Statement statement = connection.createStatement()) { statement.execute("DELETE FROM debezium.products WHERE id=1001"); } records = drain(sourceContext3, 1); assertDelete(records.get(0), "ID", 1001); // --------------------------------------------------------------------------- // Step-6: trigger checkpoint-2 to make sure we can continue to to further checkpoints // --------------------------------------------------------------------------- synchronized (sourceContext3.getCheckpointLock()) { // checkpoint 3 source3.snapshotState(new StateSnapshotContextSynchronousImpl(233, 233)); } assertHistoryState(historyState); // assert the DDL is stored in the history state Assertions.assertThat(offsetState.list).hasSize(1); String state = new String(offsetState.list.get(0), StandardCharsets.UTF_8); Assertions.assertThat(JsonPath.<String>read(state, "$.sourcePartition.server")) .isEqualTo("oracle_logminer"); source3.close(); runThread3.sync(); } { // --------------------------------------------------------------------------- // Step-7: restore the source from checkpoint-3 // --------------------------------------------------------------------------- final DebeziumSourceFunction<SourceRecord> source4 = createOracleLogminerSource(); final TestSourceContext<SourceRecord> sourceContext4 = new TestSourceContext<>(); setupSource(source4, true, offsetState, historyState, true, 0, 1); // restart the source final CheckedThread runThread4 = new CheckedThread() { @Override 
public void go() throws Exception { source4.run(sourceContext4); } }; runThread4.start(); // make sure there is no more events Assertions.assertThat(waitForAvailableRecords(Duration.ofSeconds(5), sourceContext4)) .isFalse(); // --------------------------------------------------------------------------- // Step-8: trigger checkpoint-3 to make sure we can continue to to further checkpoints // --------------------------------------------------------------------------- synchronized (sourceContext4.getCheckpointLock()) { // checkpoint 4 source4.snapshotState(new StateSnapshotContextSynchronousImpl(254, 254)); } assertHistoryState(historyState); // assert the DDL is stored in the history state Assertions.assertThat(offsetState.list).hasSize(1); String state = new String(offsetState.list.get(0), StandardCharsets.UTF_8); Assertions.assertThat(JsonPath.<String>read(state, "$.sourcePartition.server")) .isEqualTo("oracle_logminer"); source4.close(); runThread4.sync(); } } @Test @Disabled("Debezium Oracle connector don't monitor unknown tables since 1.6, see DBZ-3612") void testRecoverFromRenameOperation() throws Exception { createAndInitialize("product.sql"); final TestingListState<byte[]> offsetState = new TestingListState<>(); final TestingListState<String> historyState = new TestingListState<>(); { try (Connection connection = getJdbcConnection(); Statement statement = connection.createStatement()) { // Step-1: start the source from empty state final DebeziumSourceFunction<SourceRecord> source = createOracleLogminerSource(); final TestSourceContext<SourceRecord> sourceContext = new TestSourceContext<>(); // setup source with empty state setupSource(source, false, offsetState, historyState, true, 0, 1); final CheckedThread runThread = new CheckedThread() { @Override public void go() throws Exception { source.run(sourceContext); } }; runThread.start(); // wait until the source finishes the database snapshot List<SourceRecord> records = drain(sourceContext, 9); 
Assertions.assertThat(records).hasSize(9); // state is still empty Assertions.assertThat(offsetState.list).isEmpty(); Assertions.assertThat(historyState.list).isEmpty(); // create temporary tables which are not in the whitelist statement.execute( "CREATE TABLE debezium.tp_001_ogt_products as (select * from debezium.products WHERE 1=2)"); // do some renames statement.execute("ALTER TABLE DEBEZIUM.PRODUCTS RENAME TO tp_001_del_products"); statement.execute("ALTER TABLE debezium.tp_001_ogt_products RENAME TO PRODUCTS"); statement.execute( "INSERT INTO debezium.PRODUCTS (ID,NAME,DESCRIPTION,WEIGHT) VALUES (110,'robot','Toy robot',1.304)"); // 110 statement.execute( "INSERT INTO debezium.PRODUCTS (ID,NAME,DESCRIPTION,WEIGHT) VALUES (111,'stream train','Town stream train',1.304)"); // 111 statement.execute( "INSERT INTO debezium.PRODUCTS (ID,NAME,DESCRIPTION,WEIGHT) VALUES (112,'cargo train','City cargo train',1.304)"); // 112 int received = drain(sourceContext, 3).size(); Assertions.assertThat(received).isEqualTo(3); // Step-2: trigger a checkpoint synchronized (sourceContext.getCheckpointLock()) { // trigger checkpoint-1 source.snapshotState(new StateSnapshotContextSynchronousImpl(101, 101)); } Assertions.assertThat(historyState.list).isNotEmpty(); Assertions.assertThat(offsetState.list).isNotEmpty(); source.close(); runThread.sync(); } } { // Step-3: restore the source from state final DebeziumSourceFunction<SourceRecord> source2 = createOracleLogminerSource(); final TestSourceContext<SourceRecord> sourceContext2 = new TestSourceContext<>(); setupSource(source2, true, offsetState, historyState, true, 0, 1); final CheckedThread runThread2 = new CheckedThread() { @Override public void go() throws Exception { source2.run(sourceContext2); } }; runThread2.start(); // make sure there is no more events Assertions.assertThat(waitForAvailableRecords(Duration.ofSeconds(5), sourceContext2)) .isFalse(); try (Connection connection = getJdbcConnection(); Statement statement = 
connection.createStatement()) { statement.execute( "INSERT INTO debezium.PRODUCTS (ID,NAME,DESCRIPTION,WEIGHT) VALUES (113,'Airplane','Toy airplane',1.304)"); // 113 List<SourceRecord> records = drain(sourceContext2, 1); Assertions.assertThat(records).hasSize(1); assertInsert(records.get(0), "ID", 113); source2.close(); runThread2.sync(); } } } @Test void testConsumingEmptyTable() throws Exception { createAndInitialize("product.sql"); final TestingListState<byte[]> offsetState = new TestingListState<>(); final TestingListState<String> historyState = new TestingListState<>(); int prevPos = 0; { // --------------------------------------------------------------------------- // Step-1: start the source from empty state // --------------------------------------------------------------------------- DebeziumSourceFunction<SourceRecord> source = basicSourceBuilder().tableList("debezium.category").build(); // we use blocking context to block the source to emit before last snapshot record final BlockingSourceContext<SourceRecord> sourceContext = new BlockingSourceContext<>(8); // setup source with empty state setupSource(source, false, offsetState, historyState, true, 0, 1); final CheckedThread runThread = new CheckedThread() { @Override public void go() throws Exception { source.run(sourceContext); } }; runThread.start(); // wait until Debezium is started while (!source.getDebeziumStarted()) { Thread.sleep(100); } // --------------------------------------------------------------------------- // Step-2: trigger checkpoint-1 // --------------------------------------------------------------------------- synchronized (sourceContext.getCheckpointLock()) { source.snapshotState(new StateSnapshotContextSynchronousImpl(101, 101)); } // state is still empty Assertions.assertThat(offsetState.list).isEmpty(); // make sure there is no more events Assertions.assertThat(waitForAvailableRecords(Duration.ofSeconds(5), sourceContext)) .isFalse(); try (Connection connection = 
getJdbcConnection(); Statement statement = connection.createStatement()) { statement.execute("INSERT INTO debezium.category VALUES (1, 'book')"); statement.execute("INSERT INTO debezium.category VALUES (2, 'shoes')"); statement.execute("UPDATE debezium.category SET category_name='books' WHERE id=1"); List<SourceRecord> records = drain(sourceContext, 3); Assertions.assertThat(records).hasSize(3); assertInsert(records.get(0), "ID", 1); assertInsert(records.get(1), "ID", 2); assertUpdate(records.get(2), "ID", 1); // --------------------------------------------------------------------------- // Step-4: trigger checkpoint-2 during DML operations // --------------------------------------------------------------------------- synchronized (sourceContext.getCheckpointLock()) { // trigger checkpoint-1 source.snapshotState(new StateSnapshotContextSynchronousImpl(138, 138)); } assertHistoryState(historyState); // assert the DDL is stored in the history state Assertions.assertThat(offsetState.list).hasSize(1); String state = new String(offsetState.list.get(0), StandardCharsets.UTF_8); Assertions.assertThat(JsonPath.<String>read(state, "$.sourcePartition.server")) .isEqualTo("oracle_logminer"); } source.close(); runThread.sync(); } } private void assertHistoryState(TestingListState<String> historyState) { // assert the DDL is stored in the history state Assertions.assertThat(historyState.list).isNotEmpty(); boolean hasTable = historyState.list.stream() .skip(1) .anyMatch( history -> !((Map<?, ?>) JsonPath.read(history, "$.table")).isEmpty() && (JsonPath.read(history, "$.type") .toString() .equals("CREATE") || JsonPath.read(history, "$.type") .toString() .equals("ALTER"))); Assertions.assertThat(hasTable).isTrue(); } // ------------------------------------------------------------------------------------------ // Public Utilities // ------------------------------------------------------------------------------------------ // 
------------------------------------------------------------------------------------------ // Utilities // ------------------------------------------------------------------------------------------ private DebeziumSourceFunction<SourceRecord> createOracleLogminerSource() { return basicSourceBuilder().build(); } private OracleSource.Builder<SourceRecord> basicSourceBuilder() { Properties debeziumProperties = new Properties(); debeziumProperties.setProperty("debezium.log.mining.strategy", "online_catalog"); // ignore APEX ORCLCDB system tables changes debeziumProperties.setProperty("database.history.store.only.captured.tables.ddl", "true"); return OracleSource.<SourceRecord>builder() .hostname(ORACLE_CONTAINER.getHost()) .port(ORACLE_CONTAINER.getOraclePort()) .database("ORCLCDB") .tableList("debezium" + "." + "products") // monitor table "products" .username(ORACLE_CONTAINER.getUsername()) .password(ORACLE_CONTAINER.getPassword()) .debeziumProperties(debeziumProperties) .deserializer(new ForwardDeserializeSchema()); } private static <T> List<T> drain(TestSourceContext<T> sourceContext, int expectedRecordCount) throws Exception { List<T> allRecords = new ArrayList<>(); LinkedBlockingQueue<StreamRecord<T>> queue = sourceContext.getCollectedOutputs(); while (allRecords.size() < expectedRecordCount) { StreamRecord<T> record = queue.poll(200, TimeUnit.SECONDS); if (record != null) { allRecords.add(record.getValue()); } else { throw new RuntimeException( "Can't receive " + expectedRecordCount + " elements before timeout."); } } return allRecords; } private boolean waitForCheckpointLock(Object checkpointLock, Duration timeout) throws Exception { final Semaphore semaphore = new Semaphore(0); ExecutorService executor = Executors.newSingleThreadExecutor(); executor.execute( () -> { synchronized (checkpointLock) { semaphore.release(); } }); boolean result = semaphore.tryAcquire(timeout.toMillis(), TimeUnit.MILLISECONDS); executor.shutdownNow(); return result; } /** * Wait for 
a maximum amount of time until the first record is available. * * @param timeout the maximum amount of time to wait; must not be negative * @return {@code true} if records are available, or {@code false} if the timeout occurred and * no records are available */ private boolean waitForAvailableRecords(Duration timeout, TestSourceContext<?> sourceContext) throws InterruptedException { long now = System.currentTimeMillis(); long stop = now + timeout.toMillis(); while (System.currentTimeMillis() < stop) { if (!sourceContext.getCollectedOutputs().isEmpty()) { break; } Thread.sleep(10); // save CPU } return !sourceContext.getCollectedOutputs().isEmpty(); } private static <T> void setupSource(DebeziumSourceFunction<T> source) throws Exception { setupSource( source, false, null, null, true, // enable checkpointing; auto commit should be ignored 0, 1); } private static <T, S1, S2> void setupSource( DebeziumSourceFunction<T> source, boolean isRestored, ListState<S1> restoredOffsetState, ListState<S2> restoredHistoryState, boolean isCheckpointingEnabled, int subtaskIndex, int totalNumSubtasks) throws Exception { // run setup procedure in operator life cycle source.setRuntimeContext( new MockStreamingRuntimeContext( isCheckpointingEnabled, totalNumSubtasks, subtaskIndex)); source.initializeState( new MockFunctionInitializationContext( isRestored, new MockOperatorStateStore(restoredOffsetState, restoredHistoryState))); source.open(new Configuration()); } /** * A simple implementation of {@link DebeziumDeserializationSchema} which just forward the * {@link SourceRecord}. 
*/ public static class ForwardDeserializeSchema implements DebeziumDeserializationSchema<SourceRecord> { private static final long serialVersionUID = 2975058057832211228L; @Override public void deserialize(SourceRecord record, Collector<SourceRecord> out) throws Exception { out.collect(record); } @Override public TypeInformation<SourceRecord> getProducedType() { return TypeInformation.of(SourceRecord.class); } } private static class MockOperatorStateStore implements OperatorStateStore { private final ListState<?> restoredOffsetListState; private final ListState<?> restoredHistoryListState; private MockOperatorStateStore( ListState<?> restoredOffsetListState, ListState<?> restoredHistoryListState) { this.restoredOffsetListState = restoredOffsetListState; this.restoredHistoryListState = restoredHistoryListState; } @Override @SuppressWarnings("unchecked") public <S> ListState<S> getUnionListState(ListStateDescriptor<S> stateDescriptor) throws Exception { if (stateDescriptor.getName().equals(DebeziumSourceFunction.OFFSETS_STATE_NAME)) { return (ListState<S>) restoredOffsetListState; } else if (stateDescriptor .getName() .equals(DebeziumSourceFunction.HISTORY_RECORDS_STATE_NAME)) { return (ListState<S>) restoredHistoryListState; } else { throw new IllegalStateException("Unknown state."); } } @Override public <K, V> BroadcastState<K, V> getBroadcastState( MapStateDescriptor<K, V> stateDescriptor) throws Exception { throw new UnsupportedOperationException(); } @Override public <S> ListState<S> getListState(ListStateDescriptor<S> stateDescriptor) throws Exception { throw new UnsupportedOperationException(); } @Override public Set<String> getRegisteredStateNames() { throw new UnsupportedOperationException(); } @Override public Set<String> getRegisteredBroadcastStateNames() { throw new UnsupportedOperationException(); } } private static class MockFunctionInitializationContext implements FunctionInitializationContext { private final boolean isRestored; private final 
OperatorStateStore operatorStateStore; private MockFunctionInitializationContext( boolean isRestored, OperatorStateStore operatorStateStore) { this.isRestored = isRestored; this.operatorStateStore = operatorStateStore; } @Override public boolean isRestored() { return isRestored; } @Override public OptionalLong getRestoredCheckpointId() { throw new UnsupportedOperationException(); } @Override public OperatorStateStore getOperatorStateStore() { return operatorStateStore; } @Override public KeyedStateStore getKeyedStateStore() { throw new UnsupportedOperationException(); } } private static class BlockingSourceContext<T> extends TestSourceContext<T> { private final Semaphore blocker = new Semaphore(0); private final int expectedCount; private int currentCount = 0; private BlockingSourceContext(int expectedCount) { this.expectedCount = expectedCount; } @Override public void collect(T t) { super.collect(t); currentCount++; if (currentCount == expectedCount) { try { // block the source to emit records blocker.acquire(); } catch (InterruptedException e) { // ignore } } } } private static final class TestingListState<T> implements ListState<T> { private final List<T> list = new ArrayList<>(); private boolean clearCalled = false; @Override public void clear() { list.clear(); clearCalled = true; } @Override public Iterable<T> get() throws Exception { return list; } @Override public void add(T value) throws Exception { Preconditions.checkNotNull(value, "You cannot add null to a ListState."); list.add(value); } public List<T> getList() { return list; } boolean isClearCalled() { return clearCalled; } @Override public void update(List<T> values) throws Exception { clear(); addAll(values); } @Override public void addAll(List<T> values) throws Exception { if (values != null) { values.forEach( v -> Preconditions.checkNotNull(v, "You cannot add null to a ListState.")); list.addAll(values); } } } }
googleapis/google-cloud-java
37,162
java-discoveryengine/proto-google-cloud-discoveryengine-v1beta/src/main/java/com/google/cloud/discoveryengine/v1beta/ListSampleQuerySetsRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/discoveryengine/v1beta/sample_query_set_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.discoveryengine.v1beta; /** * * * <pre> * Request message for * [SampleQuerySetService.ListSampleQuerySets][google.cloud.discoveryengine.v1beta.SampleQuerySetService.ListSampleQuerySets] * method. * </pre> * * Protobuf type {@code google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest} */ public final class ListSampleQuerySetsRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest) ListSampleQuerySetsRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListSampleQuerySetsRequest.newBuilder() to construct. 
private ListSampleQuerySetsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListSampleQuerySetsRequest() { parent_ = ""; pageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListSampleQuerySetsRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.discoveryengine.v1beta.SampleQuerySetServiceProto .internal_static_google_cloud_discoveryengine_v1beta_ListSampleQuerySetsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.discoveryengine.v1beta.SampleQuerySetServiceProto .internal_static_google_cloud_discoveryengine_v1beta_ListSampleQuerySetsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest.class, com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The parent location resource name, such as * `projects/{project}/locations/{location}`. * * If the caller does not have permission to list * [SampleQuerySet][google.cloud.discoveryengine.v1beta.SampleQuerySet]s under * this location, regardless of whether or not this location exists, a * `PERMISSION_DENIED` error is returned. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. 
*/ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The parent location resource name, such as * `projects/{project}/locations/{location}`. * * If the caller does not have permission to list * [SampleQuerySet][google.cloud.discoveryengine.v1beta.SampleQuerySet]s under * this location, regardless of whether or not this location exists, a * `PERMISSION_DENIED` error is returned. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 2; private int pageSize_ = 0; /** * * * <pre> * Maximum number of * [SampleQuerySet][google.cloud.discoveryengine.v1beta.SampleQuerySet]s to * return. If unspecified, defaults to 100. The maximum allowed value is 1000. * Values above 1000 will be coerced to 1000. * * If this field is negative, an `INVALID_ARGUMENT` error is returned. * </pre> * * <code>int32 page_size = 2;</code> * * @return The pageSize. 
*/ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * A page token * [ListSampleQuerySetsResponse.next_page_token][google.cloud.discoveryengine.v1beta.ListSampleQuerySetsResponse.next_page_token], * received from a previous * [SampleQuerySetService.ListSampleQuerySets][google.cloud.discoveryengine.v1beta.SampleQuerySetService.ListSampleQuerySets] * call. Provide this to retrieve the subsequent page. * * When paginating, all other parameters provided to * [SampleQuerySetService.ListSampleQuerySets][google.cloud.discoveryengine.v1beta.SampleQuerySetService.ListSampleQuerySets] * must match the call that provided the page token. Otherwise, an * `INVALID_ARGUMENT` error is returned. * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * A page token * [ListSampleQuerySetsResponse.next_page_token][google.cloud.discoveryengine.v1beta.ListSampleQuerySetsResponse.next_page_token], * received from a previous * [SampleQuerySetService.ListSampleQuerySets][google.cloud.discoveryengine.v1beta.SampleQuerySetService.ListSampleQuerySets] * call. Provide this to retrieve the subsequent page. * * When paginating, all other parameters provided to * [SampleQuerySetService.ListSampleQuerySets][google.cloud.discoveryengine.v1beta.SampleQuerySetService.ListSampleQuerySets] * must match the call that provided the page token. Otherwise, an * `INVALID_ARGUMENT` error is returned. * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (pageSize_ != 0) { output.writeInt32(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest)) { return super.equals(obj); } 
com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest other = (com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest) obj; if (!getParent().equals(other.getParent())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest parseFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for * [SampleQuerySetService.ListSampleQuerySets][google.cloud.discoveryengine.v1beta.SampleQuerySetService.ListSampleQuerySets] * method. 
* </pre> * * Protobuf type {@code google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest) com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.discoveryengine.v1beta.SampleQuerySetServiceProto .internal_static_google_cloud_discoveryengine_v1beta_ListSampleQuerySetsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.discoveryengine.v1beta.SampleQuerySetServiceProto .internal_static_google_cloud_discoveryengine_v1beta_ListSampleQuerySetsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest.class, com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest.Builder.class); } // Construct using // com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; pageSize_ = 0; pageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.discoveryengine.v1beta.SampleQuerySetServiceProto .internal_static_google_cloud_discoveryengine_v1beta_ListSampleQuerySetsRequest_descriptor; } @java.lang.Override public com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest getDefaultInstanceForType() { return com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest .getDefaultInstance(); } @java.lang.Override 
public com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest build() { com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest buildPartial() { com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest result = new com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.pageSize_ = pageSize_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageToken_ = pageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest) { return mergeFrom( (com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest other) { if (other == com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest .getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { pageSize_ = input.readInt32(); bitField0_ |= 0x00000002; break; } // case 16 case 26: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. 
The parent location resource name, such as * `projects/{project}/locations/{location}`. * * If the caller does not have permission to list * [SampleQuerySet][google.cloud.discoveryengine.v1beta.SampleQuerySet]s under * this location, regardless of whether or not this location exists, a * `PERMISSION_DENIED` error is returned. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The parent location resource name, such as * `projects/{project}/locations/{location}`. * * If the caller does not have permission to list * [SampleQuerySet][google.cloud.discoveryengine.v1beta.SampleQuerySet]s under * this location, regardless of whether or not this location exists, a * `PERMISSION_DENIED` error is returned. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The parent location resource name, such as * `projects/{project}/locations/{location}`. * * If the caller does not have permission to list * [SampleQuerySet][google.cloud.discoveryengine.v1beta.SampleQuerySet]s under * this location, regardless of whether or not this location exists, a * `PERMISSION_DENIED` error is returned. 
* </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The parent location resource name, such as * `projects/{project}/locations/{location}`. * * If the caller does not have permission to list * [SampleQuerySet][google.cloud.discoveryengine.v1beta.SampleQuerySet]s under * this location, regardless of whether or not this location exists, a * `PERMISSION_DENIED` error is returned. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The parent location resource name, such as * `projects/{project}/locations/{location}`. * * If the caller does not have permission to list * [SampleQuerySet][google.cloud.discoveryengine.v1beta.SampleQuerySet]s under * this location, regardless of whether or not this location exists, a * `PERMISSION_DENIED` error is returned. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. 
*/ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private int pageSize_; /** * * * <pre> * Maximum number of * [SampleQuerySet][google.cloud.discoveryengine.v1beta.SampleQuerySet]s to * return. If unspecified, defaults to 100. The maximum allowed value is 1000. * Values above 1000 will be coerced to 1000. * * If this field is negative, an `INVALID_ARGUMENT` error is returned. * </pre> * * <code>int32 page_size = 2;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * Maximum number of * [SampleQuerySet][google.cloud.discoveryengine.v1beta.SampleQuerySet]s to * return. If unspecified, defaults to 100. The maximum allowed value is 1000. * Values above 1000 will be coerced to 1000. * * If this field is negative, an `INVALID_ARGUMENT` error is returned. * </pre> * * <code>int32 page_size = 2;</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Maximum number of * [SampleQuerySet][google.cloud.discoveryengine.v1beta.SampleQuerySet]s to * return. If unspecified, defaults to 100. The maximum allowed value is 1000. * Values above 1000 will be coerced to 1000. * * If this field is negative, an `INVALID_ARGUMENT` error is returned. * </pre> * * <code>int32 page_size = 2;</code> * * @return This builder for chaining. 
*/ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000002); pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * A page token * [ListSampleQuerySetsResponse.next_page_token][google.cloud.discoveryengine.v1beta.ListSampleQuerySetsResponse.next_page_token], * received from a previous * [SampleQuerySetService.ListSampleQuerySets][google.cloud.discoveryengine.v1beta.SampleQuerySetService.ListSampleQuerySets] * call. Provide this to retrieve the subsequent page. * * When paginating, all other parameters provided to * [SampleQuerySetService.ListSampleQuerySets][google.cloud.discoveryengine.v1beta.SampleQuerySetService.ListSampleQuerySets] * must match the call that provided the page token. Otherwise, an * `INVALID_ARGUMENT` error is returned. * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A page token * [ListSampleQuerySetsResponse.next_page_token][google.cloud.discoveryengine.v1beta.ListSampleQuerySetsResponse.next_page_token], * received from a previous * [SampleQuerySetService.ListSampleQuerySets][google.cloud.discoveryengine.v1beta.SampleQuerySetService.ListSampleQuerySets] * call. Provide this to retrieve the subsequent page. * * When paginating, all other parameters provided to * [SampleQuerySetService.ListSampleQuerySets][google.cloud.discoveryengine.v1beta.SampleQuerySetService.ListSampleQuerySets] * must match the call that provided the page token. Otherwise, an * `INVALID_ARGUMENT` error is returned. * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. 
*/ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A page token * [ListSampleQuerySetsResponse.next_page_token][google.cloud.discoveryengine.v1beta.ListSampleQuerySetsResponse.next_page_token], * received from a previous * [SampleQuerySetService.ListSampleQuerySets][google.cloud.discoveryengine.v1beta.SampleQuerySetService.ListSampleQuerySets] * call. Provide this to retrieve the subsequent page. * * When paginating, all other parameters provided to * [SampleQuerySetService.ListSampleQuerySets][google.cloud.discoveryengine.v1beta.SampleQuerySetService.ListSampleQuerySets] * must match the call that provided the page token. Otherwise, an * `INVALID_ARGUMENT` error is returned. * </pre> * * <code>string page_token = 3;</code> * * @param value The pageToken to set. * @return This builder for chaining. */ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * A page token * [ListSampleQuerySetsResponse.next_page_token][google.cloud.discoveryengine.v1beta.ListSampleQuerySetsResponse.next_page_token], * received from a previous * [SampleQuerySetService.ListSampleQuerySets][google.cloud.discoveryengine.v1beta.SampleQuerySetService.ListSampleQuerySets] * call. Provide this to retrieve the subsequent page. * * When paginating, all other parameters provided to * [SampleQuerySetService.ListSampleQuerySets][google.cloud.discoveryengine.v1beta.SampleQuerySetService.ListSampleQuerySets] * must match the call that provided the page token. Otherwise, an * `INVALID_ARGUMENT` error is returned. 
* </pre> * * <code>string page_token = 3;</code> * * @return This builder for chaining. */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * A page token * [ListSampleQuerySetsResponse.next_page_token][google.cloud.discoveryengine.v1beta.ListSampleQuerySetsResponse.next_page_token], * received from a previous * [SampleQuerySetService.ListSampleQuerySets][google.cloud.discoveryengine.v1beta.SampleQuerySetService.ListSampleQuerySets] * call. Provide this to retrieve the subsequent page. * * When paginating, all other parameters provided to * [SampleQuerySetService.ListSampleQuerySets][google.cloud.discoveryengine.v1beta.SampleQuerySetService.ListSampleQuerySets] * must match the call that provided the page token. Otherwise, an * `INVALID_ARGUMENT` error is returned. * </pre> * * <code>string page_token = 3;</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. 
*/ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest) } // @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest) private static final com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest(); } public static com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListSampleQuerySetsRequest> PARSER = new com.google.protobuf.AbstractParser<ListSampleQuerySetsRequest>() { @java.lang.Override public ListSampleQuerySetsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) 
.setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListSampleQuerySetsRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListSampleQuerySetsRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.discoveryengine.v1beta.ListSampleQuerySetsRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
// ==== dataset artifact: boundary between two unrelated source files ====
// The content above is protobuf-generated code (ListSampleQuerySetsRequest);
// the content below comes from a different repository entirely:
//   repo: openjdk/jdk8, size: 37,138 bytes,
//   path: jdk/src/share/demo/java2d/J2DBench/src/j2dbench/tests/ImageTests.java
/* * Copyright (c) 2002, 2011, Oracle and/or its affiliates. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * - Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * - Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * - Neither the name of Oracle nor the names of its * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS * IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ /* * This source code is provided to illustrate the usage of a given feature * or technique and has been deliberately simplified. Additional steps * required for a production-quality application, such as security checks, * input validation and proper error handling, might not be present in * this sample code. 
*/ package j2dbench.tests; import j2dbench.Destinations; import j2dbench.Group; import j2dbench.Modifier; import j2dbench.Option; import j2dbench.TestEnvironment; import java.awt.Graphics; import java.awt.Graphics2D; import java.awt.Color; import java.awt.Image; import java.awt.Canvas; import java.awt.AlphaComposite; import java.awt.Dimension; import java.awt.GraphicsConfiguration; import java.awt.image.BufferedImage; import java.awt.image.BufferedImageOp; import java.awt.image.ByteLookupTable; import java.awt.image.ConvolveOp; import java.awt.image.DataBuffer; import java.awt.image.IndexColorModel; import java.awt.image.Kernel; import java.awt.image.LookupOp; import java.awt.image.Raster; import java.awt.image.RasterOp; import java.awt.image.RescaleOp; import java.awt.image.ShortLookupTable; import java.awt.image.VolatileImage; import java.awt.image.WritableRaster; import java.awt.Transparency; import java.awt.geom.AffineTransform; import java.awt.image.DataBufferByte; import java.awt.image.DataBufferInt; import java.awt.image.DataBufferShort; import java.util.ArrayList; import javax.swing.JComponent; public abstract class ImageTests extends GraphicsTests { public static boolean hasVolatileImage; public static boolean hasCompatImage; static { try { hasVolatileImage = (VolatileImage.class != null); } catch (NoClassDefFoundError e) { } try { new Canvas().getGraphicsConfiguration(); hasCompatImage = true; } catch (NoSuchMethodError e) { } } static Group imageroot; static Group.EnableSet imgsrcroot; static Group.EnableSet bufimgsrcroot; static Group imgtestroot; static Group imgoptionsroot; static Group imageOpRoot; static Group imageOpOptRoot; static Group imageOpTestRoot; static Group graphicsTestRoot; static Group bufImgOpTestRoot; static Group rasterOpTestRoot; static Option opList; static Option doTouchSrc; static String transNodeNames[] = { null, "opaque", "bitmask", "translucent", }; static String transDescriptions[] = { null, "Opaque", "Bitmask", 
"Translucent", }; public static void init() { imageroot = new Group(graphicsroot, "imaging", "Imaging Benchmarks"); imageroot.setTabbed(); imgsrcroot = new Group.EnableSet(imageroot, "src", "Image Rendering Sources"); imgsrcroot.setBordered(true); imgoptionsroot = new Group(imgsrcroot, "options", "Image Source Options"); imgoptionsroot.setBordered(true); doTouchSrc = new Option.Toggle(imgoptionsroot, "touchsrc", "Touch src image before every operation", Option.Toggle.Off); imgtestroot = new Group(imageroot, "tests", "Image Rendering Tests"); imgtestroot.setBordered(true); new OffScreen(); if (hasGraphics2D) { if (hasCompatImage) { new CompatImg(Transparency.OPAQUE); new CompatImg(Transparency.BITMASK); new CompatImg(Transparency.TRANSLUCENT); } if (hasVolatileImage) { new VolatileImg(); } bufimgsrcroot = new Group.EnableSet(imgsrcroot, "bufimg", "BufferedImage Rendering Sources"); new BufImg(BufferedImage.TYPE_INT_RGB); new BufImg(BufferedImage.TYPE_INT_ARGB); new BufImg(BufferedImage.TYPE_BYTE_GRAY); new BufImg(BufferedImage.TYPE_3BYTE_BGR); new BmByteIndexBufImg(); new BufImg(BufferedImage.TYPE_INT_RGB, true); new BufImg(BufferedImage.TYPE_INT_ARGB, true); new BufImg(BufferedImage.TYPE_3BYTE_BGR, true); imageOpRoot = new Group(imageroot, "imageops", "Image Op Benchmarks"); imageOpOptRoot = new Group(imageOpRoot, "opts", "Options"); imageOpTestRoot = new Group(imageOpRoot, "tests", "Tests"); graphicsTestRoot = new Group(imageOpTestRoot, "graphics2d", "Graphics2D Tests"); bufImgOpTestRoot = new Group(imageOpTestRoot, "bufimgop", "BufferedImageOp Tests"); rasterOpTestRoot = new Group(imageOpTestRoot, "rasterop", "RasterOp Tests"); ArrayList opStrs = new ArrayList(); ArrayList opDescs = new ArrayList(); opStrs.add("convolve3x3zero"); opDescs.add("ConvolveOp (3x3 blur, zero)"); opStrs.add("convolve3x3noop"); opDescs.add("ConvolveOp (3x3 blur, noop)"); opStrs.add("convolve5x5zero"); opDescs.add("ConvolveOp (5x5 edge, zero)"); opStrs.add("convolve5x5noop"); 
opDescs.add("ConvolveOp (5x5 edge, noop)"); opStrs.add("lookup1byte"); opDescs.add("LookupOp (1 band, byte)"); opStrs.add("lookup1short"); opDescs.add("LookupOp (1 band, short)"); opStrs.add("lookup3byte"); opDescs.add("LookupOp (3 band, byte)"); opStrs.add("lookup3short"); opDescs.add("LookupOp (3 band, short)"); opStrs.add("rescale1band"); opDescs.add("RescaleOp (1 band)"); opStrs.add("rescale3band"); opDescs.add("RescaleOp (3 band)"); String[] opStrArr = new String[opStrs.size()]; opStrArr = (String[])opStrs.toArray(opStrArr); String[] opDescArr = new String[opDescs.size()]; opDescArr = (String[])opDescs.toArray(opDescArr); opList = new Option.ObjectList(imageOpOptRoot, "op", "Operation", opStrArr, opStrArr, opStrArr, opDescArr, 0x1); ((Option.ObjectList) opList).setNumRows(4); new DrawImageOp(); new BufImgOpFilter(false); new BufImgOpFilter(true); new RasterOpFilter(false); new RasterOpFilter(true); } new DrawImage(); new DrawImageBg(); new DrawImageScale("up", 1.5f); new DrawImageScale("down", .75f); new DrawImageTransform(); } public static class Context extends GraphicsTests.Context { boolean touchSrc; Image src; AffineTransform tx; } public ImageTests(Group parent, String nodeName, String description) { this(parent, nodeName, description, null); } public ImageTests(Group parent, String nodeName, String description, Modifier.Filter srcFilter) { super(parent, nodeName, description); addDependency(imgsrcroot, srcFilter); addDependency(doTouchSrc); } public GraphicsTests.Context createContext() { return new ImageTests.Context(); } public void initContext(TestEnvironment env, GraphicsTests.Context ctx) { super.initContext(env, ctx); ImageTests.Context ictx = (ImageTests.Context) ctx; ictx.src = env.getSrcImage(); ictx.touchSrc = env.isEnabled(doTouchSrc); } public abstract static class TriStateImageType extends Group { Image theImage; public TriStateImageType(Group parent, String nodename, String desc, int transparency) { super(parent, nodename, desc); 
setHorizontal(); new DrawableImage(this, Transparency.OPAQUE, true); new DrawableImage(this, Transparency.BITMASK, (transparency != Transparency.OPAQUE)); new DrawableImage(this, Transparency.TRANSLUCENT, (transparency == Transparency.TRANSLUCENT)); } public Image getImage(TestEnvironment env, int w, int h) { if (theImage == null || theImage.getWidth(null) != w || theImage.getHeight(null) != h) { theImage = makeImage(env, w, h); } return theImage; } public abstract Image makeImage(TestEnvironment env, int w, int h); } public static class OffScreen extends TriStateImageType { public OffScreen() { super(imgsrcroot, "offscr", "Offscreen Image", Transparency.OPAQUE); } public Image makeImage(TestEnvironment env, int w, int h) { Canvas c = env.getCanvas(); return c.createImage(w, h); } } public static class VolatileImg extends TriStateImageType { public VolatileImg() { super(imgsrcroot, "volimg", "Volatile Image", Transparency.OPAQUE); } public Image makeImage(TestEnvironment env, int w, int h) { Canvas c = env.getCanvas(); return c.createVolatileImage(w, h); } } public static class CompatImg extends TriStateImageType { int transparency; public CompatImg(int transparency) { super(imgsrcroot, Destinations.CompatImg.ShortNames[transparency], Destinations.CompatImg.LongDescriptions[transparency], transparency); this.transparency = transparency; } public Image makeImage(TestEnvironment env, int w, int h) { Canvas c = env.getCanvas(); GraphicsConfiguration gc = c.getGraphicsConfiguration(); return gc.createCompatibleImage(w, h, transparency); } } public static class BufImg extends TriStateImageType { int type; boolean unmanaged; static int Transparencies[] = { Transparency.TRANSLUCENT, // "custom", Transparency.OPAQUE, // "IntXrgb", Transparency.TRANSLUCENT, // "IntArgb", Transparency.TRANSLUCENT, // "IntArgbPre", Transparency.OPAQUE, // "IntXbgr", Transparency.OPAQUE, // "3ByteBgr", Transparency.TRANSLUCENT, // "4ByteAbgr", Transparency.TRANSLUCENT, // "4ByteAbgrPre", 
Transparency.OPAQUE, // "Short565", Transparency.OPAQUE, // "Short555", Transparency.OPAQUE, // "ByteGray", Transparency.OPAQUE, // "ShortGray", Transparency.OPAQUE, // "ByteBinary", Transparency.OPAQUE, // "ByteIndexed", }; public BufImg(int type) { this(type, false); } public BufImg(int type, boolean unmanaged) { super(bufimgsrcroot, (unmanaged ? "unmanaged" : "") + Destinations.BufImg.ShortNames[type], (unmanaged ? "Unmanaged " : "") + Destinations.BufImg.Descriptions[type], Transparencies[type]); this.type = type; this.unmanaged = unmanaged; } public Image makeImage(TestEnvironment env, int w, int h) { BufferedImage img = new BufferedImage(w, h, type); if (unmanaged) { DataBuffer db = img.getRaster().getDataBuffer(); if (db instanceof DataBufferInt) { ((DataBufferInt)db).getData(); } else if (db instanceof DataBufferShort) { ((DataBufferShort)db).getData(); } else if (db instanceof DataBufferByte) { ((DataBufferByte)db).getData(); } else { try { img.setAccelerationPriority(0.0f); } catch (Throwable e) {} } } return img; } } public static class BmByteIndexBufImg extends TriStateImageType { static IndexColorModel icm; public BmByteIndexBufImg() { super(bufimgsrcroot, "ByteIndexedBm", "8-bit Transparent Indexed Image", Transparency.BITMASK); } public Image makeImage(TestEnvironment env, int w, int h) { if (icm == null) { int cmap[] = new int[256]; // Workaround for transparency rendering bug in earlier VMs // Can only render transparency if first cmap entry is 0x0 // This bug is fixed in 1.4.2 (Mantis) int i = 1; for (int r = 0; r < 256; r += 51) { for (int g = 0; g < 256; g += 51) { for (int b = 0; b < 256; b += 51) { cmap[i++] = (0xff<<24)|(r<<16)|(g<<8)|b; } } } // Leave the rest of the colormap transparent icm = new IndexColorModel(8, 256, cmap, 0, true, 255, DataBuffer.TYPE_BYTE); } return new BufferedImage(w, h, BufferedImage.TYPE_BYTE_INDEXED, icm); } } public static class DrawableImage extends Option.Enable { static Color transparentBlack = 
makeAlphaColor(Color.black, 0); static Color translucentRed = makeAlphaColor(Color.red, 192); static Color translucentGreen = makeAlphaColor(Color.green, 128); static Color translucentYellow = makeAlphaColor(Color.yellow, 64); static Color colorsets[][] = new Color[][] { null, { Color.blue, Color.red, Color.green, Color.yellow, Color.blue, }, { transparentBlack, Color.red, Color.green, transparentBlack, transparentBlack, }, { Color.blue, translucentRed, translucentGreen, translucentYellow, translucentRed, }, }; TriStateImageType tsit; int transparency; boolean possible; public DrawableImage(TriStateImageType parent, int transparency, boolean possible) { super(parent, transNodeNames[transparency], transDescriptions[transparency], false); this.tsit = parent; this.transparency = transparency; this.possible = possible; } public int getTransparency() { return transparency; } public JComponent getJComponent() { JComponent comp = super.getJComponent(); comp.setEnabled(possible); return comp; } public String setValueFromString(String value) { if (!possible && !value.equalsIgnoreCase("disabled")) { return "Bad Value"; } return super.setValueFromString(value); } public void modifyTest(TestEnvironment env) { int size = env.getIntValue(sizeList); Image src = tsit.getImage(env, size, size); Graphics g = src.getGraphics(); if (hasGraphics2D) { ((Graphics2D) g).setComposite(AlphaComposite.Src); } if (size == 1) { g.setColor(colorsets[transparency][4]); g.fillRect(0, 0, 1, 1); } else { int mid = size/2; g.setColor(colorsets[transparency][0]); g.fillRect(0, 0, mid, mid); g.setColor(colorsets[transparency][1]); g.fillRect(mid, 0, size-mid, mid); g.setColor(colorsets[transparency][2]); g.fillRect(0, mid, mid, size-mid); g.setColor(colorsets[transparency][3]); g.fillRect(mid, mid, size-mid, size-mid); } g.dispose(); env.setSrcImage(src); } public void restoreTest(TestEnvironment env) { env.setSrcImage(null); } public String getAbbreviatedModifierDescription(Object value) { return 
"from "+getModifierValueName(value); } public String getModifierValueName(Object val) { return getParent().getNodeName()+" "+getNodeName(); } } public static class DrawImage extends ImageTests { public DrawImage() { super(imgtestroot, "drawimage", "drawImage(img, x, y, obs);"); } public void runTest(Object ctx, int numReps) { ImageTests.Context ictx = (ImageTests.Context) ctx; int x = ictx.initX; int y = ictx.initY; Graphics g = ictx.graphics; g.translate(ictx.orgX, ictx.orgY); Image src = ictx.src; if (ictx.animate) { if (ictx.touchSrc) { Graphics srcG = src.getGraphics(); do { srcG.fillRect(0, 0, 1, 1); g.drawImage(src, x, y, null); if ((x -= 3) < 0) x += ictx.maxX; if ((y -= 1) < 0) y += ictx.maxY; } while (--numReps > 0); } else { do { g.drawImage(src, x, y, null); if ((x -= 3) < 0) x += ictx.maxX; if ((y -= 1) < 0) y += ictx.maxY; } while (--numReps > 0); } } else { if (ictx.touchSrc) { Graphics srcG = src.getGraphics(); do { srcG.fillRect(0, 0, 1, 1); g.drawImage(src, x, y, null); } while (--numReps > 0); } else { do { g.drawImage(src, x, y, null); } while (--numReps > 0); } } g.translate(-ictx.orgX, -ictx.orgY); } } public static class DrawImageBg extends ImageTests { public DrawImageBg() { super(imgtestroot, "drawimagebg", "drawImage(img, x, y, bg, obs);", new Modifier.Filter() { public boolean isCompatible(Object val) { DrawableImage di = (DrawableImage) val; return (di.getTransparency() != Transparency.OPAQUE); } }); } public void runTest(Object ctx, int numReps) { ImageTests.Context ictx = (ImageTests.Context) ctx; int x = ictx.initX; int y = ictx.initY; Graphics g = ictx.graphics; g.translate(ictx.orgX, ictx.orgY); Image src = ictx.src; Color bg = Color.orange; if (ictx.animate) { if (ictx.touchSrc) { Graphics srcG = src.getGraphics(); do { srcG.fillRect(0, 0, 1, 1); g.drawImage(src, x, y, bg, null); if ((x -= 3) < 0) x += ictx.maxX; if ((y -= 1) < 0) y += ictx.maxY; } while (--numReps > 0); } else { do { g.drawImage(src, x, y, bg, null); if ((x -= 3) < 
0) x += ictx.maxX; if ((y -= 1) < 0) y += ictx.maxY; } while (--numReps > 0); } } else { if (ictx.touchSrc) { Graphics srcG = src.getGraphics(); do { srcG.fillRect(0, 0, 1, 1); g.drawImage(src, x, y, bg, null); } while (--numReps > 0); } else { do { g.drawImage(src, x, y, bg, null); } while (--numReps > 0); } } g.translate(-ictx.orgX, -ictx.orgY); } } public static class DrawImageScale extends ImageTests { float scale; public DrawImageScale(String dir, float scale) { super(imgtestroot, "drawimagescale"+dir, "drawImage(img, x, y, w*"+scale+", h*"+scale+", obs);"); this.scale = scale; } public Dimension getOutputSize(int w, int h) { int neww = (int) (w * scale); int newh = (int) (h * scale); if (neww == w && scale > 1f) neww = w+1; if (newh == h && scale > 1f) newh = h+1; return new Dimension(neww, newh); } public void runTest(Object ctx, int numReps) { ImageTests.Context ictx = (ImageTests.Context) ctx; int x = ictx.initX; int y = ictx.initY; int w = ictx.outdim.width; int h = ictx.outdim.height; Graphics g = ictx.graphics; g.translate(ictx.orgX, ictx.orgY); Image src = ictx.src; if (ictx.animate) { if (ictx.touchSrc) { Graphics srcG = src.getGraphics(); do { srcG.fillRect(0, 0, 1, 1); g.drawImage(src, x, y, w, h, null); if ((x -= 3) < 0) x += ictx.maxX; if ((y -= 1) < 0) y += ictx.maxY; } while (--numReps > 0); } else { do { g.drawImage(src, x, y, w, h, null); if ((x -= 3) < 0) x += ictx.maxX; if ((y -= 1) < 0) y += ictx.maxY; } while (--numReps > 0); } } else { Graphics srcG = src.getGraphics(); if (ictx.touchSrc) { do { srcG.fillRect(0, 0, 1, 1); g.drawImage(src, x, y, w, h, null); } while (--numReps > 0); } else { do { g.drawImage(src, x, y, w, h, null); } while (--numReps > 0); } } g.translate(-ictx.orgX, -ictx.orgY); } } public static class DrawImageTransform extends ImageTests { public DrawImageTransform() { super(imgtestroot, "drawimagetxform", "drawImage(img, tx, obs);"); } public Dimension getOutputSize(int w, int h) { int neww = (int) Math.ceil(w * 1.1); 
int newh = (int) Math.ceil(h * 1.1); return new Dimension(neww, newh); } public void initContext(TestEnvironment env, GraphicsTests.Context ctx) { super.initContext(env, ctx); ImageTests.Context ictx = (ImageTests.Context) ctx; ictx.tx = new AffineTransform(); } public void runTest(Object ctx, int numReps) { ImageTests.Context ictx = (ImageTests.Context) ctx; int x = ictx.initX; int y = ictx.initY; Graphics2D g = (Graphics2D) ictx.graphics; g.translate(ictx.orgX, ictx.orgY); Image src = ictx.src; AffineTransform tx = ictx.tx; if (ictx.animate) { if (ictx.touchSrc) { Graphics srcG = src.getGraphics(); do { tx.setTransform(1.0, 0.1, 0.1, 1.0, x, y); srcG.fillRect(0, 0, 1, 1); g.drawImage(src, tx, null); if ((x -= 3) < 0) x += ictx.maxX; if ((y -= 1) < 0) y += ictx.maxY; } while (--numReps > 0); } else { do { tx.setTransform(1.0, 0.1, 0.1, 1.0, x, y); g.drawImage(src, tx, null); if ((x -= 3) < 0) x += ictx.maxX; if ((y -= 1) < 0) y += ictx.maxY; } while (--numReps > 0); } } else { tx.setTransform(1.0, 0.1, 0.1, 1.0, x, y); if (ictx.touchSrc) { Graphics srcG = src.getGraphics(); do { srcG.fillRect(0, 0, 1, 1); g.drawImage(src, tx, null); } while (--numReps > 0); } else { do { g.drawImage(src, tx, null); } while (--numReps > 0); } } g.translate(-ictx.orgX, -ictx.orgY); } } private static abstract class ImageOpTests extends ImageTests { ImageOpTests(Group parent, String nodeName, String desc) { super(parent, nodeName, desc, new Modifier.Filter() { public boolean isCompatible(Object val) { // Filter out all non-BufferedImage sources DrawableImage di = (DrawableImage) val; Group imgtype = di.getParent(); return !(imgtype instanceof VolatileImg) && !(imgtype instanceof OffScreen); } }); addDependencies(imageOpOptRoot, true); } private static class Context extends ImageTests.Context { BufferedImageOp bufImgOp; BufferedImage bufSrc; BufferedImage bufDst; RasterOp rasterOp; Raster rasSrc; WritableRaster rasDst; } public GraphicsTests.Context createContext() { return new 
ImageOpTests.Context(); } public void initContext(TestEnvironment env, GraphicsTests.Context ctx) { super.initContext(env, ctx); ImageOpTests.Context ictx = (ImageOpTests.Context)ctx; // Note: We filter out all non-BufferedImage sources in the // ImageOpTests constructor above, so the following is safe... ictx.bufSrc = (BufferedImage)ictx.src; String op = (String)env.getModifier(opList); if (op.startsWith("convolve")) { Kernel kernel; if (op.startsWith("convolve3x3")) { // 3x3 blur float[] data = { 0.1f, 0.1f, 0.1f, 0.1f, 0.2f, 0.1f, 0.1f, 0.1f, 0.1f, }; kernel = new Kernel(3, 3, data); } else { // (op.startsWith("convolve5x5")) // 5x5 edge float[] data = { -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, 24.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, }; kernel = new Kernel(5, 5, data); } int edge = op.endsWith("zero") ? ConvolveOp.EDGE_ZERO_FILL : ConvolveOp.EDGE_NO_OP; ictx.bufImgOp = new ConvolveOp(kernel, edge, null); } else if (op.startsWith("lookup")) { if (op.endsWith("byte")) { byte invert[] = new byte[256]; byte ordered[] = new byte[256]; for (int j = 0; j < 256 ; j++) { invert[j] = (byte)(255-j); ordered[j] = (byte)j; } if (op.equals("lookup1byte")) { ictx.bufImgOp = new LookupOp(new ByteLookupTable(0, invert), null); } else { // (op.equals("lookup3byte")) byte[][] yellowInvert = new byte[][] { invert, invert, ordered }; ictx.bufImgOp = new LookupOp(new ByteLookupTable(0, yellowInvert), null); } } else { // (op.endsWith("short")) short invert[] = new short[256]; short ordered[] = new short[256]; for (int j = 0; j < 256 ; j++) { invert[j] = (short)((255-j) * 255); ordered[j] = (short)(j * 255); } if (op.equals("lookup1short")) { ictx.bufImgOp = new LookupOp(new ShortLookupTable(0, invert), null); } else { // (op.equals("lookup3short")) short[][] yellowInvert = new short[][] { invert, invert, ordered }; ictx.bufImgOp = new LookupOp(new ShortLookupTable(0, yellowInvert), null); } 
} } else if (op.equals("rescale1band")) { ictx.bufImgOp = new RescaleOp(0.5f, 10.0f, null); } else if (op.equals("rescale3band")) { float[] scaleFactors = { 0.5f, 0.3f, 0.8f }; float[] offsets = { 5.0f, -7.5f, 1.0f }; ictx.bufImgOp = new RescaleOp(scaleFactors, offsets, null); } else { throw new InternalError("Invalid image op"); } ictx.rasterOp = (RasterOp)ictx.bufImgOp; } } private static class DrawImageOp extends ImageOpTests { DrawImageOp() { super(graphicsTestRoot, "drawimageop", "drawImage(srcBufImg, op, x, y);"); } public void runTest(Object ctx, int numReps) { ImageOpTests.Context ictx = (ImageOpTests.Context)ctx; int x = ictx.initX; int y = ictx.initY; BufferedImageOp op = ictx.bufImgOp; BufferedImage src = ictx.bufSrc; Graphics2D g2 = (Graphics2D)ictx.graphics; g2.translate(ictx.orgX, ictx.orgY); if (ictx.animate) { if (ictx.touchSrc) { Graphics gSrc = src.getGraphics(); do { gSrc.fillRect(0, 0, 1, 1); g2.drawImage(src, op, x, y); if ((x -= 3) < 0) x += ictx.maxX; if ((y -= 1) < 0) y += ictx.maxY; } while (--numReps > 0); } else { do { g2.drawImage(src, op, x, y); if ((x -= 3) < 0) x += ictx.maxX; if ((y -= 1) < 0) y += ictx.maxY; } while (--numReps > 0); } } else { if (ictx.touchSrc) { Graphics gSrc = src.getGraphics(); do { gSrc.fillRect(0, 0, 1, 1); g2.drawImage(src, op, x, y); } while (--numReps > 0); } else { do { g2.drawImage(src, op, x, y); } while (--numReps > 0); } } g2.translate(-ictx.orgX, -ictx.orgY); } } private static class BufImgOpFilter extends ImageOpTests { private boolean cached; BufImgOpFilter(boolean cached) { super(bufImgOpTestRoot, "filter" + (cached ? "cached" : "null"), "op.filter(srcBufImg, " + (cached ? 
"cachedCompatibleDestImg" : "null") + ");"); this.cached = cached; } public void initContext(TestEnvironment env, GraphicsTests.Context ctx) { super.initContext(env, ctx); ImageOpTests.Context ictx = (ImageOpTests.Context)ctx; if (cached) { ictx.bufDst = ictx.bufImgOp.createCompatibleDestImage(ictx.bufSrc, null); } } public void runTest(Object ctx, int numReps) { ImageOpTests.Context ictx = (ImageOpTests.Context)ctx; BufferedImageOp op = ictx.bufImgOp; BufferedImage src = ictx.bufSrc; BufferedImage dst = ictx.bufDst; if (ictx.touchSrc) { Graphics gSrc = src.getGraphics(); do { gSrc.fillRect(0, 0, 1, 1); op.filter(src, dst); } while (--numReps > 0); } else { do { op.filter(src, dst); } while (--numReps > 0); } } } private static class RasterOpFilter extends ImageOpTests { private boolean cached; RasterOpFilter(boolean cached) { super(rasterOpTestRoot, "filter" + (cached ? "cached" : "null"), "op.filter(srcRaster, " + (cached ? "cachedCompatibleDestRaster" : "null") + ");"); this.cached = cached; } public void initContext(TestEnvironment env, GraphicsTests.Context ctx) { super.initContext(env, ctx); ImageOpTests.Context ictx = (ImageOpTests.Context)ctx; ictx.rasSrc = ictx.bufSrc.getRaster(); if (cached) { ictx.bufDst = ictx.bufImgOp.createCompatibleDestImage(ictx.bufSrc, null); ictx.rasDst = ictx.bufDst.getRaster(); } } public void runTest(Object ctx, int numReps) { ImageOpTests.Context ictx = (ImageOpTests.Context)ctx; RasterOp op = ictx.rasterOp; Raster src = ictx.rasSrc; WritableRaster dst = ictx.rasDst; if (ictx.touchSrc) { Graphics gSrc = ictx.bufSrc.getGraphics(); do { gSrc.fillRect(0, 0, 1, 1); op.filter(src, dst); } while (--numReps > 0); } else { do { op.filter(src, dst); } while (--numReps > 0); } } } }
apache/hadoop
37,106
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalJobRunner.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.mapred; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import java.io.OutputStream; import java.security.NoSuchAlgorithmException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Random; import java.util.concurrent.ExecutorService; import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import javax.crypto.KeyGenerator; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.Text; import org.apache.hadoop.ipc.ProtocolSignature; import org.apache.hadoop.mapreduce.Cluster.JobTrackerStatus; import org.apache.hadoop.mapreduce.ClusterMetrics; import org.apache.hadoop.mapreduce.CryptoUtils; import org.apache.hadoop.mapreduce.MRConfig; import org.apache.hadoop.mapreduce.MRJobConfig; 
import org.apache.hadoop.mapreduce.OutputFormat; import org.apache.hadoop.mapreduce.QueueInfo; import org.apache.hadoop.mapreduce.TaskCompletionEvent; import org.apache.hadoop.mapreduce.TaskTrackerInfo; import org.apache.hadoop.mapreduce.TaskType; import org.apache.hadoop.mapreduce.checkpoint.TaskCheckpointID; import org.apache.hadoop.mapreduce.protocol.ClientProtocol; import org.apache.hadoop.mapreduce.security.TokenCache; import org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier; import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig; import org.apache.hadoop.mapreduce.split.JobSplit.TaskSplitMetaInfo; import org.apache.hadoop.mapreduce.split.SplitMetaInfoReader; import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl; import org.apache.hadoop.mapreduce.v2.LogParams; import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authorize.AccessControlList; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder; import org.apache.hadoop.util.concurrent.HadoopExecutors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** Implements MapReduce locally, in-process, for debugging. 
*/ @InterfaceAudience.Private @InterfaceStability.Unstable public class LocalJobRunner implements ClientProtocol { public static final Logger LOG = LoggerFactory.getLogger(LocalJobRunner.class); /** The maximum number of map tasks to run in parallel in LocalJobRunner */ public static final String LOCAL_MAX_MAPS = "mapreduce.local.map.tasks.maximum"; /** The maximum number of reduce tasks to run in parallel in LocalJobRunner */ public static final String LOCAL_MAX_REDUCES = "mapreduce.local.reduce.tasks.maximum"; public static final String INTERMEDIATE_DATA_ENCRYPTION_ALGO = "HmacSHA1"; private FileSystem fs; private HashMap<JobID, Job> jobs = new HashMap<JobID, Job>(); private JobConf conf; private AtomicInteger map_tasks = new AtomicInteger(0); private AtomicInteger reduce_tasks = new AtomicInteger(0); final Random rand = new Random(); private LocalJobRunnerMetrics myMetrics = null; private static final String jobDir = "localRunner/"; public long getProtocolVersion(String protocol, long clientVersion) { return ClientProtocol.versionID; } @Override public ProtocolSignature getProtocolSignature(String protocol, long clientVersion, int clientMethodsHash) throws IOException { return ProtocolSignature.getProtocolSignature( this, protocol, clientVersion, clientMethodsHash); } private class Job extends Thread implements TaskUmbilicalProtocol { // The job directory on the system: JobClient places job configurations here. // This is analogous to JobTracker's system directory. private Path systemJobDir; private Path systemJobFile; // The job directory for the task. Analagous to a task's job directory. 
private Path localJobDir; private Path localJobFile; private JobID id; private JobConf job; private int numMapTasks; private int numReduceTasks; private float [] partialMapProgress; private float [] partialReduceProgress; private Counters [] mapCounters; private Counters [] reduceCounters; private JobStatus status; private List<TaskAttemptID> mapIds = Collections.synchronizedList( new ArrayList<TaskAttemptID>()); private JobProfile profile; private FileSystem localFs; boolean killed = false; private LocalDistributedCacheManager localDistributedCacheManager; public long getProtocolVersion(String protocol, long clientVersion) { return TaskUmbilicalProtocol.versionID; } @Override public ProtocolSignature getProtocolSignature(String protocol, long clientVersion, int clientMethodsHash) throws IOException { return ProtocolSignature.getProtocolSignature( this, protocol, clientVersion, clientMethodsHash); } public Job(JobID jobid, String jobSubmitDir) throws IOException { this.systemJobDir = new Path(jobSubmitDir); this.systemJobFile = new Path(systemJobDir, "job.xml"); this.id = jobid; JobConf conf = new JobConf(systemJobFile); this.localFs = FileSystem.getLocal(conf); String user = UserGroupInformation.getCurrentUser().getShortUserName(); this.localJobDir = localFs.makeQualified(new Path( new Path(conf.getLocalPath(jobDir), user), jobid.toString())); this.localJobFile = new Path(this.localJobDir, id + ".xml"); // Manage the distributed cache. If there are files to be copied, // this will trigger localFile to be re-written again. localDistributedCacheManager = new LocalDistributedCacheManager(); localDistributedCacheManager.setup(conf, jobid); // Write out configuration file. Instead of copying it from // systemJobFile, we re-write it, since setup(), above, may have // updated it. 
OutputStream out = localFs.create(localJobFile); try { conf.writeXml(out); } finally { out.close(); } this.job = new JobConf(localJobFile); // Job (the current object) is a Thread, so we wrap its class loader. if (localDistributedCacheManager.hasLocalClasspaths()) { setContextClassLoader(localDistributedCacheManager.makeClassLoader( getContextClassLoader())); } profile = new JobProfile(job.getUser(), id, systemJobFile.toString(), "http://localhost:8080/", job.getJobName()); status = new JobStatus(id, 0.0f, 0.0f, JobStatus.RUNNING, profile.getUser(), profile.getJobName(), profile.getJobFile(), profile.getURL().toString()); jobs.put(id, this); if (CryptoUtils.isEncryptedSpillEnabled(job)) { try { int keyLen = conf.getInt( MRJobConfig.MR_ENCRYPTED_INTERMEDIATE_DATA_KEY_SIZE_BITS, MRJobConfig .DEFAULT_MR_ENCRYPTED_INTERMEDIATE_DATA_KEY_SIZE_BITS); KeyGenerator keyGen = KeyGenerator.getInstance(INTERMEDIATE_DATA_ENCRYPTION_ALGO); keyGen.init(keyLen); Credentials creds = UserGroupInformation.getCurrentUser().getCredentials(); TokenCache.setEncryptedSpillKey(keyGen.generateKey().getEncoded(), creds); UserGroupInformation.getCurrentUser().addCredentials(creds); } catch (NoSuchAlgorithmException e) { throw new IOException("Error generating encrypted spill key", e); } } this.start(); } protected abstract class RunnableWithThrowable implements Runnable { public volatile Throwable storedException; } /** * A Runnable instance that handles a map task to be run by an executor. */ protected class MapTaskRunnable extends RunnableWithThrowable { private final int taskId; private final TaskSplitMetaInfo info; private final JobID jobId; private final JobConf localConf; // This is a reference to a shared object passed in by the // external context; this delivers state to the reducers regarding // where to fetch mapper outputs. 
private final Map<TaskAttemptID, MapOutputFile> mapOutputFiles; public MapTaskRunnable(TaskSplitMetaInfo info, int taskId, JobID jobId, Map<TaskAttemptID, MapOutputFile> mapOutputFiles) { this.info = info; this.taskId = taskId; this.mapOutputFiles = mapOutputFiles; this.jobId = jobId; this.localConf = new JobConf(job); } public void run() { try { TaskAttemptID mapId = new TaskAttemptID(new TaskID( jobId, TaskType.MAP, taskId), 0); LOG.info("Starting task: " + mapId); mapIds.add(mapId); MapTask map = new MapTask(systemJobFile.toString(), mapId, taskId, info.getSplitIndex(), 1); map.setUser(UserGroupInformation.getCurrentUser(). getShortUserName()); setupChildMapredLocalDirs(map, localConf); MapOutputFile mapOutput = new MROutputFiles(); mapOutput.setConf(localConf); mapOutputFiles.put(mapId, mapOutput); map.setJobFile(localJobFile.toString()); localConf.setUser(map.getUser()); map.localizeConfiguration(localConf); map.setConf(localConf); try { map_tasks.getAndIncrement(); myMetrics.launchMap(mapId); map.run(localConf, Job.this); myMetrics.completeMap(mapId); } finally { map_tasks.getAndDecrement(); } LOG.info("Finishing task: " + mapId); } catch (Throwable e) { this.storedException = e; } } } /** * Create Runnables to encapsulate map tasks for use by the executor * service. * @param taskInfo Info about the map task splits * @param jobId the job id * @param mapOutputFiles a mapping from task attempts to output files * @return a List of Runnables, one per map task. 
*/ protected List<RunnableWithThrowable> getMapTaskRunnables( TaskSplitMetaInfo [] taskInfo, JobID jobId, Map<TaskAttemptID, MapOutputFile> mapOutputFiles) { int numTasks = 0; ArrayList<RunnableWithThrowable> list = new ArrayList<RunnableWithThrowable>(); for (TaskSplitMetaInfo task : taskInfo) { list.add(new MapTaskRunnable(task, numTasks++, jobId, mapOutputFiles)); } return list; } protected class ReduceTaskRunnable extends RunnableWithThrowable { private final int taskId; private final JobID jobId; private final JobConf localConf; // This is a reference to a shared object passed in by the // external context; this delivers state to the reducers regarding // where to fetch mapper outputs. private final Map<TaskAttemptID, MapOutputFile> mapOutputFiles; public ReduceTaskRunnable(int taskId, JobID jobId, Map<TaskAttemptID, MapOutputFile> mapOutputFiles) { this.taskId = taskId; this.jobId = jobId; this.mapOutputFiles = mapOutputFiles; this.localConf = new JobConf(job); this.localConf.set("mapreduce.jobtracker.address", "local"); } public void run() { try { TaskAttemptID reduceId = new TaskAttemptID(new TaskID( jobId, TaskType.REDUCE, taskId), 0); LOG.info("Starting task: " + reduceId); ReduceTask reduce = new ReduceTask(systemJobFile.toString(), reduceId, taskId, mapIds.size(), 1); reduce.setUser(UserGroupInformation.getCurrentUser(). 
getShortUserName()); setupChildMapredLocalDirs(reduce, localConf); reduce.setLocalMapFiles(mapOutputFiles); if (!Job.this.isInterrupted()) { reduce.setJobFile(localJobFile.toString()); localConf.setUser(reduce.getUser()); reduce.localizeConfiguration(localConf); reduce.setConf(localConf); try { reduce_tasks.getAndIncrement(); myMetrics.launchReduce(reduce.getTaskID()); reduce.run(localConf, Job.this); myMetrics.completeReduce(reduce.getTaskID()); } finally { reduce_tasks.getAndDecrement(); } LOG.info("Finishing task: " + reduceId); } else { throw new InterruptedException(); } } catch (Throwable t) { // store this to be rethrown in the initial thread context. this.storedException = t; } } } /** * Create Runnables to encapsulate reduce tasks for use by the executor * service. * @param jobId the job id * @param mapOutputFiles a mapping from task attempts to output files * @return a List of Runnables, one per reduce task. */ protected List<RunnableWithThrowable> getReduceTaskRunnables( JobID jobId, Map<TaskAttemptID, MapOutputFile> mapOutputFiles) { int taskId = 0; ArrayList<RunnableWithThrowable> list = new ArrayList<RunnableWithThrowable>(); for (int i = 0; i < this.numReduceTasks; i++) { list.add(new ReduceTaskRunnable(taskId++, jobId, mapOutputFiles)); } return list; } /** * Initialize the counters that will hold partial-progress from * the various task attempts. * @param numMaps the number of map tasks in this job. */ private synchronized void initCounters(int numMaps, int numReduces) { // Initialize state trackers for all map tasks. 
this.partialMapProgress = new float[numMaps]; this.mapCounters = new Counters[numMaps]; for (int i = 0; i < numMaps; i++) { this.mapCounters[i] = new Counters(); } this.partialReduceProgress = new float[numReduces]; this.reduceCounters = new Counters[numReduces]; for (int i = 0; i < numReduces; i++) { this.reduceCounters[i] = new Counters(); } this.numMapTasks = numMaps; this.numReduceTasks = numReduces; } /** * Creates the executor service used to run map tasks. * * @return an ExecutorService instance that handles map tasks */ protected synchronized ExecutorService createMapExecutor() { // Determine the size of the thread pool to use int maxMapThreads = job.getInt(LOCAL_MAX_MAPS, 1); if (maxMapThreads < 1) { throw new IllegalArgumentException( "Configured " + LOCAL_MAX_MAPS + " must be >= 1"); } maxMapThreads = Math.min(maxMapThreads, this.numMapTasks); maxMapThreads = Math.max(maxMapThreads, 1); // In case of no tasks. LOG.debug("Starting mapper thread pool executor."); LOG.debug("Max local threads: " + maxMapThreads); LOG.debug("Map tasks to process: " + this.numMapTasks); // Create a new executor service to drain the work queue. ThreadFactory tf = new ThreadFactoryBuilder() .setNameFormat("LocalJobRunner Map Task Executor #%d") .build(); ExecutorService executor = HadoopExecutors.newFixedThreadPool( maxMapThreads, tf); return executor; } /** * Creates the executor service used to run reduce tasks. * * @return an ExecutorService instance that handles reduce tasks */ protected synchronized ExecutorService createReduceExecutor() { // Determine the size of the thread pool to use int maxReduceThreads = job.getInt(LOCAL_MAX_REDUCES, 1); if (maxReduceThreads < 1) { throw new IllegalArgumentException( "Configured " + LOCAL_MAX_REDUCES + " must be >= 1"); } maxReduceThreads = Math.min(maxReduceThreads, this.numReduceTasks); maxReduceThreads = Math.max(maxReduceThreads, 1); // In case of no tasks. 
LOG.debug("Starting reduce thread pool executor."); LOG.debug("Max local threads: " + maxReduceThreads); LOG.debug("Reduce tasks to process: " + this.numReduceTasks); // Create a new executor service to drain the work queue. ExecutorService executor = HadoopExecutors.newFixedThreadPool( maxReduceThreads); return executor; } /** Run a set of tasks and waits for them to complete. */ private void runTasks(List<RunnableWithThrowable> runnables, ExecutorService service, String taskType) throws Exception { // Start populating the executor with work units. // They may begin running immediately (in other threads). for (Runnable r : runnables) { service.submit(r); } try { service.shutdown(); // Instructs queue to drain. // Wait for tasks to finish; do not use a time-based timeout. // (See http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6179024) LOG.info("Waiting for " + taskType + " tasks"); service.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS); } catch (InterruptedException ie) { // Cancel all threads. service.shutdownNow(); throw ie; } LOG.info(taskType + " task executor complete."); // After waiting for the tasks to complete, if any of these // have thrown an exception, rethrow it now in the main thread context. 
for (RunnableWithThrowable r : runnables) { if (r.storedException != null) { throw new Exception(r.storedException); } } } private org.apache.hadoop.mapreduce.OutputCommitter createOutputCommitter(boolean newApiCommitter, JobID jobId, Configuration conf) throws Exception { org.apache.hadoop.mapreduce.OutputCommitter committer = null; LOG.info("OutputCommitter set in config " + conf.get("mapred.output.committer.class")); if (newApiCommitter) { org.apache.hadoop.mapreduce.TaskID taskId = new org.apache.hadoop.mapreduce.TaskID(jobId, TaskType.MAP, 0); org.apache.hadoop.mapreduce.TaskAttemptID taskAttemptID = new org.apache.hadoop.mapreduce.TaskAttemptID(taskId, 0); org.apache.hadoop.mapreduce.TaskAttemptContext taskContext = new TaskAttemptContextImpl(conf, taskAttemptID); OutputFormat outputFormat = ReflectionUtils.newInstance(taskContext.getOutputFormatClass(), conf); committer = outputFormat.getOutputCommitter(taskContext); } else { committer = ReflectionUtils.newInstance(conf.getClass( "mapred.output.committer.class", FileOutputCommitter.class, org.apache.hadoop.mapred.OutputCommitter.class), conf); } LOG.info("OutputCommitter is " + committer.getClass().getName()); return committer; } @Override public void run() { JobID jobId = profile.getJobID(); JobContext jContext = new JobContextImpl(job, jobId); org.apache.hadoop.mapreduce.OutputCommitter outputCommitter = null; try { outputCommitter = createOutputCommitter(conf.getUseNewMapper(), jobId, conf); } catch (Exception e) { LOG.info("Failed to createOutputCommitter", e); return; } try { TaskSplitMetaInfo[] taskSplitMetaInfos = SplitMetaInfoReader.readSplitMetaInfo(jobId, localFs, conf, systemJobDir); int numReduceTasks = job.getNumReduceTasks(); outputCommitter.setupJob(jContext); status.setSetupProgress(1.0f); Map<TaskAttemptID, MapOutputFile> mapOutputFiles = Collections.synchronizedMap(new HashMap<TaskAttemptID, MapOutputFile>()); List<RunnableWithThrowable> mapRunnables = getMapTaskRunnables( 
taskSplitMetaInfos, jobId, mapOutputFiles); initCounters(mapRunnables.size(), numReduceTasks); ExecutorService mapService = createMapExecutor(); runTasks(mapRunnables, mapService, "map"); try { if (numReduceTasks > 0) { List<RunnableWithThrowable> reduceRunnables = getReduceTaskRunnables( jobId, mapOutputFiles); ExecutorService reduceService = createReduceExecutor(); runTasks(reduceRunnables, reduceService, "reduce"); } } finally { for (MapOutputFile output : mapOutputFiles.values()) { output.removeAll(); } } // delete the temporary directory in output directory outputCommitter.commitJob(jContext); status.setCleanupProgress(1.0f); if (killed) { this.status.setRunState(JobStatus.KILLED); } else { this.status.setRunState(JobStatus.SUCCEEDED); } JobEndNotifier.localRunnerNotification(job, status); } catch (Throwable t) { try { outputCommitter.abortJob(jContext, org.apache.hadoop.mapreduce.JobStatus.State.FAILED); } catch (IOException ioe) { LOG.info("Error cleaning up job:" + id); } status.setCleanupProgress(1.0f); if (killed) { this.status.setRunState(JobStatus.KILLED); } else { this.status.setRunState(JobStatus.FAILED); } LOG.warn(id.toString(), t); JobEndNotifier.localRunnerNotification(job, status); } finally { try { try { // Cleanup distributed cache localDistributedCacheManager.close(); } finally { try { fs.delete(systemJobFile.getParent(), true); // delete submit dir } finally { localFs.delete(localJobFile, true); // delete local copy } } } catch (IOException e) { LOG.warn("Error cleaning up "+id+": "+e); } } } // TaskUmbilicalProtocol methods @Override public JvmTask getTask(JvmContext context) { return null; } @Override public synchronized AMFeedback statusUpdate(TaskAttemptID taskId, TaskStatus taskStatus) throws IOException, InterruptedException { AMFeedback feedback = new AMFeedback(); feedback.setTaskFound(true); if (null == taskStatus) { return feedback; } // Serialize as we would if distributed in order to make deep copy ByteArrayOutputStream baos = new 
ByteArrayOutputStream(); DataOutputStream dos = new DataOutputStream(baos); taskStatus.write(dos); dos.close(); taskStatus = TaskStatus.createTaskStatus(taskStatus.getIsMap()); taskStatus.readFields(new DataInputStream( new ByteArrayInputStream(baos.toByteArray()))); LOG.info(taskStatus.getStateString()); int mapTaskIndex = mapIds.indexOf(taskId); if (mapTaskIndex >= 0) { // mapping float numTasks = (float) this.numMapTasks; partialMapProgress[mapTaskIndex] = taskStatus.getProgress(); mapCounters[mapTaskIndex] = taskStatus.getCounters(); float partialProgress = 0.0f; for (float f : partialMapProgress) { partialProgress += f; } status.setMapProgress(partialProgress / numTasks); } else { // reducing int reduceTaskIndex = taskId.getTaskID().getId(); float numTasks = (float) this.numReduceTasks; partialReduceProgress[reduceTaskIndex] = taskStatus.getProgress(); reduceCounters[reduceTaskIndex] = taskStatus.getCounters(); float partialProgress = 0.0f; for (float f : partialReduceProgress) { partialProgress += f; } status.setReduceProgress(partialProgress / numTasks); } // ignore phase return feedback; } /** Return the current values of the counters for this job, * including tasks that are in progress. */ public synchronized Counters getCurrentCounters() { if (null == mapCounters) { // Counters not yet initialized for job. 
return new Counters(); } Counters current = new Counters(); for (Counters c : mapCounters) { current = Counters.sum(current, c); } if (null != reduceCounters && reduceCounters.length > 0) { for (Counters c : reduceCounters) { current = Counters.sum(current, c); } } return current; } /** * Task is reporting that it is in commit_pending * and it is waiting for the commit Response */ public void commitPending(TaskAttemptID taskid, TaskStatus taskStatus) throws IOException, InterruptedException { statusUpdate(taskid, taskStatus); } @Override public void reportDiagnosticInfo(TaskAttemptID taskid, String trace) { // Ignore for now } @Override public void reportNextRecordRange(TaskAttemptID taskid, SortedRanges.Range range) throws IOException { LOG.info("Task " + taskid + " reportedNextRecordRange " + range); } @Override public boolean canCommit(TaskAttemptID taskid) throws IOException { return true; } @Override public void done(TaskAttemptID taskId) throws IOException { int taskIndex = mapIds.indexOf(taskId); if (taskIndex >= 0) { // mapping status.setMapProgress(1.0f); } else { status.setReduceProgress(1.0f); } } @Override public synchronized void fsError(TaskAttemptID taskId, String message) throws IOException { LOG.error("FSError: "+ message + "from task: " + taskId); } @Override public void shuffleError(TaskAttemptID taskId, String message) throws IOException { LOG.error("shuffleError: "+ message + "from task: " + taskId); } public synchronized void fatalError(TaskAttemptID taskId, String msg, boolean fastFail) throws IOException { LOG.error("Fatal: "+ msg + " from task: " + taskId + " fast fail: " + fastFail); } @Override public MapTaskCompletionEventsUpdate getMapCompletionEvents(JobID jobId, int fromEventId, int maxLocs, TaskAttemptID id) throws IOException { return new MapTaskCompletionEventsUpdate( org.apache.hadoop.mapred.TaskCompletionEvent.EMPTY_ARRAY, false); } @Override public void preempted(TaskAttemptID taskId, TaskStatus taskStatus) throws IOException, 
InterruptedException { // ignore } @Override public TaskCheckpointID getCheckpointID(TaskID taskId) { // ignore return null; } @Override public void setCheckpointID(TaskID downgrade, TaskCheckpointID cid) { // ignore } } public LocalJobRunner(Configuration conf) throws IOException { this(new JobConf(conf)); } @Deprecated public LocalJobRunner(JobConf conf) throws IOException { this.fs = FileSystem.getLocal(conf); this.conf = conf; myMetrics = LocalJobRunnerMetrics.create(); } // JobSubmissionProtocol methods private static int jobid = 0; // used for making sure that local jobs run in different jvms don't // collide on staging or job directories private int randid; public synchronized org.apache.hadoop.mapreduce.JobID getNewJobID() { return new org.apache.hadoop.mapreduce.JobID("local" + randid, ++jobid); } public org.apache.hadoop.mapreduce.JobStatus submitJob( org.apache.hadoop.mapreduce.JobID jobid, String jobSubmitDir, Credentials credentials) throws IOException { Job job = new Job(JobID.downgrade(jobid), jobSubmitDir); job.job.setCredentials(credentials); return job.status; } public void killJob(org.apache.hadoop.mapreduce.JobID id) { jobs.get(JobID.downgrade(id)).killed = true; jobs.get(JobID.downgrade(id)).interrupt(); } public void setJobPriority(org.apache.hadoop.mapreduce.JobID id, String jp) throws IOException { throw new UnsupportedOperationException("Changing job priority " + "in LocalJobRunner is not supported."); } /** Throws {@link UnsupportedOperationException} */ public boolean killTask(org.apache.hadoop.mapreduce.TaskAttemptID taskId, boolean shouldFail) throws IOException { throw new UnsupportedOperationException("Killing tasks in " + "LocalJobRunner is not supported"); } public org.apache.hadoop.mapreduce.TaskReport[] getTaskReports( org.apache.hadoop.mapreduce.JobID id, TaskType type) { return new org.apache.hadoop.mapreduce.TaskReport[0]; } public org.apache.hadoop.mapreduce.JobStatus getJobStatus( org.apache.hadoop.mapreduce.JobID id) { Job 
job = jobs.get(JobID.downgrade(id)); if(job != null) return job.status; else return null; } public org.apache.hadoop.mapreduce.Counters getJobCounters( org.apache.hadoop.mapreduce.JobID id) { Job job = jobs.get(JobID.downgrade(id)); return new org.apache.hadoop.mapreduce.Counters(job.getCurrentCounters()); } public String getFilesystemName() throws IOException { return fs.getUri().toString(); } public ClusterMetrics getClusterMetrics() { int numMapTasks = map_tasks.get(); int numReduceTasks = reduce_tasks.get(); return new ClusterMetrics(numMapTasks, numReduceTasks, numMapTasks, numReduceTasks, 0, 0, 1, 1, jobs.size(), 1, 0, 0); } public JobTrackerStatus getJobTrackerStatus() { return JobTrackerStatus.RUNNING; } public long getTaskTrackerExpiryInterval() throws IOException, InterruptedException { return 0; } /** * Get all active trackers in cluster. * @return array of TaskTrackerInfo */ public TaskTrackerInfo[] getActiveTrackers() throws IOException, InterruptedException { return new TaskTrackerInfo[0]; } /** * Get all blacklisted trackers in cluster. * @return array of TaskTrackerInfo */ public TaskTrackerInfo[] getBlacklistedTrackers() throws IOException, InterruptedException { return new TaskTrackerInfo[0]; } public TaskCompletionEvent[] getTaskCompletionEvents( org.apache.hadoop.mapreduce.JobID jobid , int fromEventId, int maxEvents) throws IOException { return TaskCompletionEvent.EMPTY_ARRAY; } public org.apache.hadoop.mapreduce.JobStatus[] getAllJobs() {return null;} /** * Returns the diagnostic information for a particular task in the given job. 
* To be implemented */ public String[] getTaskDiagnostics( org.apache.hadoop.mapreduce.TaskAttemptID taskid) throws IOException{ return new String [0]; } /** * @see org.apache.hadoop.mapreduce.protocol.ClientProtocol#getSystemDir() */ public String getSystemDir() { Path sysDir = new Path( conf.get(JTConfig.JT_SYSTEM_DIR, "/tmp/hadoop/mapred/system")); return fs.makeQualified(sysDir).toString(); } /** * @see org.apache.hadoop.mapreduce.protocol.ClientProtocol#getQueueAdmins(String) */ public AccessControlList getQueueAdmins(String queueName) throws IOException { return new AccessControlList(" ");// no queue admins for local job runner } /** * @see org.apache.hadoop.mapreduce.protocol.ClientProtocol#getStagingAreaDir() */ public String getStagingAreaDir() throws IOException { Path stagingRootDir = new Path(conf.get(JTConfig.JT_STAGING_AREA_ROOT, "/tmp/hadoop/mapred/staging")); UserGroupInformation ugi = UserGroupInformation.getCurrentUser(); String user; randid = rand.nextInt(Integer.MAX_VALUE); if (ugi != null) { user = ugi.getShortUserName() + randid; } else { user = "dummy" + randid; } return fs.makeQualified(new Path(stagingRootDir, user+"/.staging")).toString(); } public String getJobHistoryDir() { return null; } @Override public QueueInfo[] getChildQueues(String queueName) throws IOException { return null; } @Override public QueueInfo[] getRootQueues() throws IOException { return null; } @Override public QueueInfo[] getQueues() throws IOException { return null; } @Override public QueueInfo getQueue(String queue) throws IOException { return null; } @Override public org.apache.hadoop.mapreduce.QueueAclsInfo[] getQueueAclsForCurrentUser() throws IOException{ return null; } /** * Set the max number of map tasks to run concurrently in the LocalJobRunner. * @param job the job to configure * @param maxMaps the maximum number of map tasks to allow. 
*/ public static void setLocalMaxRunningMaps( org.apache.hadoop.mapreduce.JobContext job, int maxMaps) { job.getConfiguration().setInt(LOCAL_MAX_MAPS, maxMaps); } /** * @return the max number of map tasks to run concurrently in the * LocalJobRunner. */ public static int getLocalMaxRunningMaps( org.apache.hadoop.mapreduce.JobContext job) { return job.getConfiguration().getInt(LOCAL_MAX_MAPS, 1); } /** * Set the max number of reduce tasks to run concurrently in the LocalJobRunner. * @param job the job to configure * @param maxReduces the maximum number of reduce tasks to allow. */ public static void setLocalMaxRunningReduces( org.apache.hadoop.mapreduce.JobContext job, int maxReduces) { job.getConfiguration().setInt(LOCAL_MAX_REDUCES, maxReduces); } /** * @return the max number of reduce tasks to run concurrently in the * LocalJobRunner. */ public static int getLocalMaxRunningReduces( org.apache.hadoop.mapreduce.JobContext job) { return job.getConfiguration().getInt(LOCAL_MAX_REDUCES, 1); } @Override public void cancelDelegationToken(Token<DelegationTokenIdentifier> token ) throws IOException, InterruptedException { } @Override public Token<DelegationTokenIdentifier> getDelegationToken(Text renewer) throws IOException, InterruptedException { return null; } @Override public long renewDelegationToken(Token<DelegationTokenIdentifier> token ) throws IOException,InterruptedException{ return 0; } @Override public LogParams getLogFileParams(org.apache.hadoop.mapreduce.JobID jobID, org.apache.hadoop.mapreduce.TaskAttemptID taskAttemptID) throws IOException, InterruptedException { throw new UnsupportedOperationException("Not supported"); } static void setupChildMapredLocalDirs(Task t, JobConf conf) { String[] localDirs = conf.getTrimmedStrings(MRConfig.LOCAL_DIR); String jobId = t.getJobID().toString(); String taskId = t.getTaskID().toString(); boolean isCleanup = t.isTaskCleanupTask(); String user = t.getUser(); StringBuilder childMapredLocalDir = new 
StringBuilder(localDirs[0] + Path.SEPARATOR + getLocalTaskDir(user, jobId, taskId, isCleanup)); for (int i = 1; i < localDirs.length; i++) { childMapredLocalDir.append("," + localDirs[i] + Path.SEPARATOR + getLocalTaskDir(user, jobId, taskId, isCleanup)); } LOG.debug(MRConfig.LOCAL_DIR + " for child : " + childMapredLocalDir); conf.set(MRConfig.LOCAL_DIR, childMapredLocalDir.toString()); } static final String TASK_CLEANUP_SUFFIX = ".cleanup"; static final String JOBCACHE = "jobcache"; static String getLocalTaskDir(String user, String jobid, String taskid, boolean isCleanupAttempt) { String taskDir = jobDir + Path.SEPARATOR + user + Path.SEPARATOR + JOBCACHE + Path.SEPARATOR + jobid + Path.SEPARATOR + taskid; if (isCleanupAttempt) { taskDir = taskDir + TASK_CLEANUP_SUFFIX; } return taskDir; } }
googleapis/google-api-java-client-services
37,502
clients/google-api-services-retail/v2/1.31.0/com/google/api/services/retail/v2/model/GoogleCloudRetailV2UserEvent.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.retail.v2.model; /** * UserEvent captures all metadata information Retail API needs to know about how end users interact * with customers' website. * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the Retail API. For a detailed explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class GoogleCloudRetailV2UserEvent extends com.google.api.client.json.GenericJson { /** * Extra user event features to include in the recommendation model. If you provide custom * attributes for ingested user events, also include them in the user events that you associate * with prediction requests. Custom attribute formatting must be consistent between imported * events and events provided with prediction requests. This lets the Retail API use those custom * attributes when training models and serving predictions, which helps improve recommendation * quality. 
This field needs to pass all below criteria, otherwise an INVALID_ARGUMENT error is * returned: * The key must be a UTF-8 encoded string with a length limit of 5,000 characters. * * For text attributes, at most 400 values are allowed. Empty values are not allowed. Each value * must be a UTF-8 encoded string with a length limit of 256 characters. * For number attributes, * at most 400 values are allowed. For product recommendations, an example of extra user * information is traffic_channel, which is how a user arrives at the site. Users can arrive at * the site by coming to the site directly, coming through Google search, or in other ways. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.Map<String, GoogleCloudRetailV2CustomAttribute> attributes; static { // hack to force ProGuard to consider GoogleCloudRetailV2CustomAttribute used, since otherwise it would be stripped out // see https://github.com/google/google-api-java-client/issues/543 com.google.api.client.util.Data.nullOf(GoogleCloudRetailV2CustomAttribute.class); } /** * Highly recommended for user events that are the result of PredictionService.Predict. This field * enables accurate attribution of recommendation model performance. The value must be a valid * PredictResponse.attribution_token for user events that are the result of * PredictionService.Predict. The value must be a valid SearchResponse.attribution_token for user * events that are the result of SearchService.Search. This token enables us to accurately * attribute page view or purchase back to the event and the particular predict response * containing this clicked/purchased product. If user clicks on product K in the recommendation * results, pass PredictResponse.attribution_token as a URL parameter to product K's page. When * recording events on product K's page, log the PredictResponse.attribution_token to this field. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.lang.String attributionToken; /** * The ID or name of the associated shopping cart. This ID is used to associate multiple items * added or present in the cart before purchase. This can only be set for `add-to-cart`, * `purchase-complete`, or `shopping-cart-page-view` events. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String cartId; /** * The main auto-completion details related to the event. This field should be set for `search` * event when autocomplete function is enabled and the user clicks a suggestion for search. * The value may be {@code null}. */ @com.google.api.client.util.Key private GoogleCloudRetailV2CompletionDetail completionDetail; /** * Only required for UserEventService.ImportUserEvents method. Timestamp of when the user event * happened. * The value may be {@code null}. */ @com.google.api.client.util.Key private String eventTime; /** * Required. User event type. Allowed values are: * `add-to-cart`: Products being added to cart. * * `category-page-view`: Special pages such as sale or promotion pages viewed. * `detail-page- * view`: Products detail page viewed. * `home-page-view`: Homepage viewed. * `promotion-offered`: * Promotion is offered to a user. * `promotion-not-offered`: Promotion is not offered to a user. * * `purchase-complete`: User finishing a purchase. * `search`: Product search. * `shopping-cart- * page-view`: User viewing a shopping cart. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String eventType; /** * A list of identifiers for the independent experiment groups this user event belongs to. This is * used to distinguish between user events associated with different experiment setups (e.g. using * Retail API, using different recommendation models). * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.util.List<java.lang.String> experimentIds; /** * The filter syntax consists of an expression language for constructing a predicate from one or * more fields of the products being filtered. See SearchRequest.filter for definition and syntax. * The value must be a UTF-8 encoded string with a length limit of 1,000 characters. Otherwise, an * INVALID_ARGUMENT error is returned. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String filter; /** * An integer that specifies the current offset for pagination (the 0-indexed starting location, * amongst the products deemed by the API as relevant). See SearchRequest.offset for definition. * If this field is negative, an INVALID_ARGUMENT is returned. This can only be set for `search` * events. Other event types should not set this field. Otherwise, an INVALID_ARGUMENT error is * returned. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Integer offset; /** * The order in which products are returned. See SearchRequest.order_by for definition and syntax. * The value must be a UTF-8 encoded string with a length limit of 1,000 characters. Otherwise, an * INVALID_ARGUMENT error is returned. This can only be set for `search` events. Other event types * should not set this field. Otherwise, an INVALID_ARGUMENT error is returned. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String orderBy; /** * The categories associated with a category page. To represent full path of category, use '>' * sign to separate different hierarchies. If '>' is part of the category name, please replace it * with other character(s). Category pages include special pages such as sales or promotions. For * instance, a special sale page may have the category hierarchy: "pageCategories" : ["Sales > * 2017 Black Friday Deals"]. Required for `category-page-view` events. 
At least one of * search_query or page_categories is required for `search` events. Other event types should not * set this field. Otherwise, an INVALID_ARGUMENT error is returned. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<java.lang.String> pageCategories; /** * A unique ID of a web page view. This should be kept the same for all user events triggered from * the same pageview. For example, an item detail page view could trigger multiple events as the * user is browsing the page. The `pageViewId` property should be kept the same for all these * events so that they can be grouped together properly. When using the client side event * reporting with JavaScript pixel and Google Tag Manager, this value is filled in automatically. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String pageViewId; /** * The main product details related to the event. This field is optional except for the following * event types: * `add-to-cart` * `detail-page-view` * `purchase-complete` In a `search` event, * this field represents the products returned to the end user on the current page (the end user * may have not finished browsing the whole page yet). When a new page is returned to the end * user, after pagination/filtering/ordering even for the same query, a new `search` event with * different product_details is desired. The end user may have not finished browsing the whole * page yet. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<GoogleCloudRetailV2ProductDetail> productDetails; static { // hack to force ProGuard to consider GoogleCloudRetailV2ProductDetail used, since otherwise it would be stripped out // see https://github.com/google/google-api-java-client/issues/543 com.google.api.client.util.Data.nullOf(GoogleCloudRetailV2ProductDetail.class); } /** * A transaction represents the entire purchase transaction. Required for `purchase-complete` * events. 
Other event types should not set this field. Otherwise, an INVALID_ARGUMENT error is * returned. * The value may be {@code null}. */ @com.google.api.client.util.Key private GoogleCloudRetailV2PurchaseTransaction purchaseTransaction; /** * The referrer URL of the current page. When using the client side event reporting with * JavaScript pixel and Google Tag Manager, this value is filled in automatically. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String referrerUri; /** * The user's search query. See SearchRequest.query for definition. The value must be a UTF-8 * encoded string with a length limit of 5,000 characters. Otherwise, an INVALID_ARGUMENT error is * returned. At least one of search_query or page_categories is required for `search` events. * Other event types should not set this field. Otherwise, an INVALID_ARGUMENT error is returned. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String searchQuery; /** * A unique identifier for tracking a visitor session with a length limit of 128 bytes. A session * is an aggregation of an end user behavior in a time span. A general guideline to populate the * sesion_id: 1. If user has no activity for 30 min, a new session_id should be assigned. 2. The * session_id should be unique across users, suggest use uuid or add visitor_id as prefix. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String sessionId; /** * Complete URL (window.location.href) of the user's current page. When using the client side * event reporting with JavaScript pixel and Google Tag Manager, this value is filled in * automatically. Maximum length 5,000 characters. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String uri; /** * User information. * The value may be {@code null}. */ @com.google.api.client.util.Key private GoogleCloudRetailV2UserInfo userInfo; /** * Required. 
A unique identifier for tracking visitors. For example, this could be implemented * with an HTTP cookie, which should be able to uniquely identify a visitor on a single device. * This unique identifier should not change if the visitor log in/out of the website. Don't set * the field to the same fixed ID for different users. This mixes the event history of those users * together, which results in degraded model quality. The field must be a UTF-8 encoded string * with a length limit of 128 characters. Otherwise, an INVALID_ARGUMENT error is returned. The * field should not contain PII or user-data. We recommend to use Google Analytics [Client * ID](https://developers.google.com/analytics/devguides/collection/analyticsjs/field- * reference#clientId) for this field. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String visitorId; /** * Extra user event features to include in the recommendation model. If you provide custom * attributes for ingested user events, also include them in the user events that you associate * with prediction requests. Custom attribute formatting must be consistent between imported * events and events provided with prediction requests. This lets the Retail API use those custom * attributes when training models and serving predictions, which helps improve recommendation * quality. This field needs to pass all below criteria, otherwise an INVALID_ARGUMENT error is * returned: * The key must be a UTF-8 encoded string with a length limit of 5,000 characters. * * For text attributes, at most 400 values are allowed. Empty values are not allowed. Each value * must be a UTF-8 encoded string with a length limit of 256 characters. * For number attributes, * at most 400 values are allowed. For product recommendations, an example of extra user * information is traffic_channel, which is how a user arrives at the site. 
Users can arrive at * the site by coming to the site directly, coming through Google search, or in other ways. * @return value or {@code null} for none */ public java.util.Map<String, GoogleCloudRetailV2CustomAttribute> getAttributes() { return attributes; } /** * Extra user event features to include in the recommendation model. If you provide custom * attributes for ingested user events, also include them in the user events that you associate * with prediction requests. Custom attribute formatting must be consistent between imported * events and events provided with prediction requests. This lets the Retail API use those custom * attributes when training models and serving predictions, which helps improve recommendation * quality. This field needs to pass all below criteria, otherwise an INVALID_ARGUMENT error is * returned: * The key must be a UTF-8 encoded string with a length limit of 5,000 characters. * * For text attributes, at most 400 values are allowed. Empty values are not allowed. Each value * must be a UTF-8 encoded string with a length limit of 256 characters. * For number attributes, * at most 400 values are allowed. For product recommendations, an example of extra user * information is traffic_channel, which is how a user arrives at the site. Users can arrive at * the site by coming to the site directly, coming through Google search, or in other ways. * @param attributes attributes or {@code null} for none */ public GoogleCloudRetailV2UserEvent setAttributes(java.util.Map<String, GoogleCloudRetailV2CustomAttribute> attributes) { this.attributes = attributes; return this; } /** * Highly recommended for user events that are the result of PredictionService.Predict. This field * enables accurate attribution of recommendation model performance. The value must be a valid * PredictResponse.attribution_token for user events that are the result of * PredictionService.Predict. 
The value must be a valid SearchResponse.attribution_token for user * events that are the result of SearchService.Search. This token enables us to accurately * attribute page view or purchase back to the event and the particular predict response * containing this clicked/purchased product. If user clicks on product K in the recommendation * results, pass PredictResponse.attribution_token as a URL parameter to product K's page. When * recording events on product K's page, log the PredictResponse.attribution_token to this field. * @return value or {@code null} for none */ public java.lang.String getAttributionToken() { return attributionToken; } /** * Highly recommended for user events that are the result of PredictionService.Predict. This field * enables accurate attribution of recommendation model performance. The value must be a valid * PredictResponse.attribution_token for user events that are the result of * PredictionService.Predict. The value must be a valid SearchResponse.attribution_token for user * events that are the result of SearchService.Search. This token enables us to accurately * attribute page view or purchase back to the event and the particular predict response * containing this clicked/purchased product. If user clicks on product K in the recommendation * results, pass PredictResponse.attribution_token as a URL parameter to product K's page. When * recording events on product K's page, log the PredictResponse.attribution_token to this field. * @param attributionToken attributionToken or {@code null} for none */ public GoogleCloudRetailV2UserEvent setAttributionToken(java.lang.String attributionToken) { this.attributionToken = attributionToken; return this; } /** * The ID or name of the associated shopping cart. This ID is used to associate multiple items * added or present in the cart before purchase. This can only be set for `add-to-cart`, * `purchase-complete`, or `shopping-cart-page-view` events. 
* @return value or {@code null} for none */ public java.lang.String getCartId() { return cartId; } /** * The ID or name of the associated shopping cart. This ID is used to associate multiple items * added or present in the cart before purchase. This can only be set for `add-to-cart`, * `purchase-complete`, or `shopping-cart-page-view` events. * @param cartId cartId or {@code null} for none */ public GoogleCloudRetailV2UserEvent setCartId(java.lang.String cartId) { this.cartId = cartId; return this; } /** * The main auto-completion details related to the event. This field should be set for `search` * event when autocomplete function is enabled and the user clicks a suggestion for search. * @return value or {@code null} for none */ public GoogleCloudRetailV2CompletionDetail getCompletionDetail() { return completionDetail; } /** * The main auto-completion details related to the event. This field should be set for `search` * event when autocomplete function is enabled and the user clicks a suggestion for search. * @param completionDetail completionDetail or {@code null} for none */ public GoogleCloudRetailV2UserEvent setCompletionDetail(GoogleCloudRetailV2CompletionDetail completionDetail) { this.completionDetail = completionDetail; return this; } /** * Only required for UserEventService.ImportUserEvents method. Timestamp of when the user event * happened. * @return value or {@code null} for none */ public String getEventTime() { return eventTime; } /** * Only required for UserEventService.ImportUserEvents method. Timestamp of when the user event * happened. * @param eventTime eventTime or {@code null} for none */ public GoogleCloudRetailV2UserEvent setEventTime(String eventTime) { this.eventTime = eventTime; return this; } /** * Required. User event type. Allowed values are: * `add-to-cart`: Products being added to cart. * * `category-page-view`: Special pages such as sale or promotion pages viewed. * `detail-page- * view`: Products detail page viewed. 
* `home-page-view`: Homepage viewed. * `promotion-offered`: * Promotion is offered to a user. * `promotion-not-offered`: Promotion is not offered to a user. * * `purchase-complete`: User finishing a purchase. * `search`: Product search. * `shopping-cart- * page-view`: User viewing a shopping cart. * @return value or {@code null} for none */ public java.lang.String getEventType() { return eventType; } /** * Required. User event type. Allowed values are: * `add-to-cart`: Products being added to cart. * * `category-page-view`: Special pages such as sale or promotion pages viewed. * `detail-page- * view`: Products detail page viewed. * `home-page-view`: Homepage viewed. * `promotion-offered`: * Promotion is offered to a user. * `promotion-not-offered`: Promotion is not offered to a user. * * `purchase-complete`: User finishing a purchase. * `search`: Product search. * `shopping-cart- * page-view`: User viewing a shopping cart. * @param eventType eventType or {@code null} for none */ public GoogleCloudRetailV2UserEvent setEventType(java.lang.String eventType) { this.eventType = eventType; return this; } /** * A list of identifiers for the independent experiment groups this user event belongs to. This is * used to distinguish between user events associated with different experiment setups (e.g. using * Retail API, using different recommendation models). * @return value or {@code null} for none */ public java.util.List<java.lang.String> getExperimentIds() { return experimentIds; } /** * A list of identifiers for the independent experiment groups this user event belongs to. This is * used to distinguish between user events associated with different experiment setups (e.g. using * Retail API, using different recommendation models). 
* @param experimentIds experimentIds or {@code null} for none */ public GoogleCloudRetailV2UserEvent setExperimentIds(java.util.List<java.lang.String> experimentIds) { this.experimentIds = experimentIds; return this; } /** * The filter syntax consists of an expression language for constructing a predicate from one or * more fields of the products being filtered. See SearchRequest.filter for definition and syntax. * The value must be a UTF-8 encoded string with a length limit of 1,000 characters. Otherwise, an * INVALID_ARGUMENT error is returned. * @return value or {@code null} for none */ public java.lang.String getFilter() { return filter; } /** * The filter syntax consists of an expression language for constructing a predicate from one or * more fields of the products being filtered. See SearchRequest.filter for definition and syntax. * The value must be a UTF-8 encoded string with a length limit of 1,000 characters. Otherwise, an * INVALID_ARGUMENT error is returned. * @param filter filter or {@code null} for none */ public GoogleCloudRetailV2UserEvent setFilter(java.lang.String filter) { this.filter = filter; return this; } /** * An integer that specifies the current offset for pagination (the 0-indexed starting location, * amongst the products deemed by the API as relevant). See SearchRequest.offset for definition. * If this field is negative, an INVALID_ARGUMENT is returned. This can only be set for `search` * events. Other event types should not set this field. Otherwise, an INVALID_ARGUMENT error is * returned. * @return value or {@code null} for none */ public java.lang.Integer getOffset() { return offset; } /** * An integer that specifies the current offset for pagination (the 0-indexed starting location, * amongst the products deemed by the API as relevant). See SearchRequest.offset for definition. * If this field is negative, an INVALID_ARGUMENT is returned. This can only be set for `search` * events. Other event types should not set this field. 
Otherwise, an INVALID_ARGUMENT error is * returned. * @param offset offset or {@code null} for none */ public GoogleCloudRetailV2UserEvent setOffset(java.lang.Integer offset) { this.offset = offset; return this; } /** * The order in which products are returned. See SearchRequest.order_by for definition and syntax. * The value must be a UTF-8 encoded string with a length limit of 1,000 characters. Otherwise, an * INVALID_ARGUMENT error is returned. This can only be set for `search` events. Other event types * should not set this field. Otherwise, an INVALID_ARGUMENT error is returned. * @return value or {@code null} for none */ public java.lang.String getOrderBy() { return orderBy; } /** * The order in which products are returned. See SearchRequest.order_by for definition and syntax. * The value must be a UTF-8 encoded string with a length limit of 1,000 characters. Otherwise, an * INVALID_ARGUMENT error is returned. This can only be set for `search` events. Other event types * should not set this field. Otherwise, an INVALID_ARGUMENT error is returned. * @param orderBy orderBy or {@code null} for none */ public GoogleCloudRetailV2UserEvent setOrderBy(java.lang.String orderBy) { this.orderBy = orderBy; return this; } /** * The categories associated with a category page. To represent full path of category, use '>' * sign to separate different hierarchies. If '>' is part of the category name, please replace it * with other character(s). Category pages include special pages such as sales or promotions. For * instance, a special sale page may have the category hierarchy: "pageCategories" : ["Sales > * 2017 Black Friday Deals"]. Required for `category-page-view` events. At least one of * search_query or page_categories is required for `search` events. Other event types should not * set this field. Otherwise, an INVALID_ARGUMENT error is returned. 
* @return value or {@code null} for none */ public java.util.List<java.lang.String> getPageCategories() { return pageCategories; } /** * The categories associated with a category page. To represent full path of category, use '>' * sign to separate different hierarchies. If '>' is part of the category name, please replace it * with other character(s). Category pages include special pages such as sales or promotions. For * instance, a special sale page may have the category hierarchy: "pageCategories" : ["Sales > * 2017 Black Friday Deals"]. Required for `category-page-view` events. At least one of * search_query or page_categories is required for `search` events. Other event types should not * set this field. Otherwise, an INVALID_ARGUMENT error is returned. * @param pageCategories pageCategories or {@code null} for none */ public GoogleCloudRetailV2UserEvent setPageCategories(java.util.List<java.lang.String> pageCategories) { this.pageCategories = pageCategories; return this; } /** * A unique ID of a web page view. This should be kept the same for all user events triggered from * the same pageview. For example, an item detail page view could trigger multiple events as the * user is browsing the page. The `pageViewId` property should be kept the same for all these * events so that they can be grouped together properly. When using the client side event * reporting with JavaScript pixel and Google Tag Manager, this value is filled in automatically. * @return value or {@code null} for none */ public java.lang.String getPageViewId() { return pageViewId; } /** * A unique ID of a web page view. This should be kept the same for all user events triggered from * the same pageview. For example, an item detail page view could trigger multiple events as the * user is browsing the page. The `pageViewId` property should be kept the same for all these * events so that they can be grouped together properly. 
When using the client side event * reporting with JavaScript pixel and Google Tag Manager, this value is filled in automatically. * @param pageViewId pageViewId or {@code null} for none */ public GoogleCloudRetailV2UserEvent setPageViewId(java.lang.String pageViewId) { this.pageViewId = pageViewId; return this; } /** * The main product details related to the event. This field is optional except for the following * event types: * `add-to-cart` * `detail-page-view` * `purchase-complete` In a `search` event, * this field represents the products returned to the end user on the current page (the end user * may have not finished browsing the whole page yet). When a new page is returned to the end * user, after pagination/filtering/ordering even for the same query, a new `search` event with * different product_details is desired. The end user may have not finished browsing the whole * page yet. * @return value or {@code null} for none */ public java.util.List<GoogleCloudRetailV2ProductDetail> getProductDetails() { return productDetails; } /** * The main product details related to the event. This field is optional except for the following * event types: * `add-to-cart` * `detail-page-view` * `purchase-complete` In a `search` event, * this field represents the products returned to the end user on the current page (the end user * may have not finished browsing the whole page yet). When a new page is returned to the end * user, after pagination/filtering/ordering even for the same query, a new `search` event with * different product_details is desired. The end user may have not finished browsing the whole * page yet. * @param productDetails productDetails or {@code null} for none */ public GoogleCloudRetailV2UserEvent setProductDetails(java.util.List<GoogleCloudRetailV2ProductDetail> productDetails) { this.productDetails = productDetails; return this; } /** * A transaction represents the entire purchase transaction. Required for `purchase-complete` * events. 
Other event types should not set this field. Otherwise, an INVALID_ARGUMENT error is * returned. * @return value or {@code null} for none */ public GoogleCloudRetailV2PurchaseTransaction getPurchaseTransaction() { return purchaseTransaction; } /** * A transaction represents the entire purchase transaction. Required for `purchase-complete` * events. Other event types should not set this field. Otherwise, an INVALID_ARGUMENT error is * returned. * @param purchaseTransaction purchaseTransaction or {@code null} for none */ public GoogleCloudRetailV2UserEvent setPurchaseTransaction(GoogleCloudRetailV2PurchaseTransaction purchaseTransaction) { this.purchaseTransaction = purchaseTransaction; return this; } /** * The referrer URL of the current page. When using the client side event reporting with * JavaScript pixel and Google Tag Manager, this value is filled in automatically. * @return value or {@code null} for none */ public java.lang.String getReferrerUri() { return referrerUri; } /** * The referrer URL of the current page. When using the client side event reporting with * JavaScript pixel and Google Tag Manager, this value is filled in automatically. * @param referrerUri referrerUri or {@code null} for none */ public GoogleCloudRetailV2UserEvent setReferrerUri(java.lang.String referrerUri) { this.referrerUri = referrerUri; return this; } /** * The user's search query. See SearchRequest.query for definition. The value must be a UTF-8 * encoded string with a length limit of 5,000 characters. Otherwise, an INVALID_ARGUMENT error is * returned. At least one of search_query or page_categories is required for `search` events. * Other event types should not set this field. Otherwise, an INVALID_ARGUMENT error is returned. * @return value or {@code null} for none */ public java.lang.String getSearchQuery() { return searchQuery; } /** * The user's search query. See SearchRequest.query for definition. 
The value must be a UTF-8 * encoded string with a length limit of 5,000 characters. Otherwise, an INVALID_ARGUMENT error is * returned. At least one of search_query or page_categories is required for `search` events. * Other event types should not set this field. Otherwise, an INVALID_ARGUMENT error is returned. * @param searchQuery searchQuery or {@code null} for none */ public GoogleCloudRetailV2UserEvent setSearchQuery(java.lang.String searchQuery) { this.searchQuery = searchQuery; return this; } /** * A unique identifier for tracking a visitor session with a length limit of 128 bytes. A session * is an aggregation of an end user behavior in a time span. A general guideline to populate the * sesion_id: 1. If user has no activity for 30 min, a new session_id should be assigned. 2. The * session_id should be unique across users, suggest use uuid or add visitor_id as prefix. * @return value or {@code null} for none */ public java.lang.String getSessionId() { return sessionId; } /** * A unique identifier for tracking a visitor session with a length limit of 128 bytes. A session * is an aggregation of an end user behavior in a time span. A general guideline to populate the * sesion_id: 1. If user has no activity for 30 min, a new session_id should be assigned. 2. The * session_id should be unique across users, suggest use uuid or add visitor_id as prefix. * @param sessionId sessionId or {@code null} for none */ public GoogleCloudRetailV2UserEvent setSessionId(java.lang.String sessionId) { this.sessionId = sessionId; return this; } /** * Complete URL (window.location.href) of the user's current page. When using the client side * event reporting with JavaScript pixel and Google Tag Manager, this value is filled in * automatically. Maximum length 5,000 characters. * @return value or {@code null} for none */ public java.lang.String getUri() { return uri; } /** * Complete URL (window.location.href) of the user's current page. 
When using the client side * event reporting with JavaScript pixel and Google Tag Manager, this value is filled in * automatically. Maximum length 5,000 characters. * @param uri uri or {@code null} for none */ public GoogleCloudRetailV2UserEvent setUri(java.lang.String uri) { this.uri = uri; return this; } /** * User information. * @return value or {@code null} for none */ public GoogleCloudRetailV2UserInfo getUserInfo() { return userInfo; } /** * User information. * @param userInfo userInfo or {@code null} for none */ public GoogleCloudRetailV2UserEvent setUserInfo(GoogleCloudRetailV2UserInfo userInfo) { this.userInfo = userInfo; return this; } /** * Required. A unique identifier for tracking visitors. For example, this could be implemented * with an HTTP cookie, which should be able to uniquely identify a visitor on a single device. * This unique identifier should not change if the visitor log in/out of the website. Don't set * the field to the same fixed ID for different users. This mixes the event history of those users * together, which results in degraded model quality. The field must be a UTF-8 encoded string * with a length limit of 128 characters. Otherwise, an INVALID_ARGUMENT error is returned. The * field should not contain PII or user-data. We recommend to use Google Analytics [Client * ID](https://developers.google.com/analytics/devguides/collection/analyticsjs/field- * reference#clientId) for this field. * @return value or {@code null} for none */ public java.lang.String getVisitorId() { return visitorId; } /** * Required. A unique identifier for tracking visitors. For example, this could be implemented * with an HTTP cookie, which should be able to uniquely identify a visitor on a single device. * This unique identifier should not change if the visitor log in/out of the website. Don't set * the field to the same fixed ID for different users. This mixes the event history of those users * together, which results in degraded model quality. 
The field must be a UTF-8 encoded string * with a length limit of 128 characters. Otherwise, an INVALID_ARGUMENT error is returned. The * field should not contain PII or user-data. We recommend to use Google Analytics [Client * ID](https://developers.google.com/analytics/devguides/collection/analyticsjs/field- * reference#clientId) for this field. * @param visitorId visitorId or {@code null} for none */ public GoogleCloudRetailV2UserEvent setVisitorId(java.lang.String visitorId) { this.visitorId = visitorId; return this; } @Override public GoogleCloudRetailV2UserEvent set(String fieldName, Object value) { return (GoogleCloudRetailV2UserEvent) super.set(fieldName, value); } @Override public GoogleCloudRetailV2UserEvent clone() { return (GoogleCloudRetailV2UserEvent) super.clone(); } }
googleads/google-ads-java
37,239
google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/common/TagSnippet.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v19/common/tag_snippet.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v19.common; /** * <pre> * The site tag and event snippet pair for a TrackingCodeType. * </pre> * * Protobuf type {@code google.ads.googleads.v19.common.TagSnippet} */ public final class TagSnippet extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v19.common.TagSnippet) TagSnippetOrBuilder { private static final long serialVersionUID = 0L; // Use TagSnippet.newBuilder() to construct. private TagSnippet(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private TagSnippet() { type_ = 0; pageFormat_ = 0; globalSiteTag_ = ""; eventSnippet_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new TagSnippet(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.common.TagSnippetProto.internal_static_google_ads_googleads_v19_common_TagSnippet_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.common.TagSnippetProto.internal_static_google_ads_googleads_v19_common_TagSnippet_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.common.TagSnippet.class, com.google.ads.googleads.v19.common.TagSnippet.Builder.class); } private int bitField0_; public static final int TYPE_FIELD_NUMBER = 1; private int type_ = 0; /** * <pre> * The type of the generated tag snippets for tracking conversions. * </pre> * * <code>.google.ads.googleads.v19.enums.TrackingCodeTypeEnum.TrackingCodeType type = 1;</code> * @return The enum numeric value on the wire for type. 
*/ @java.lang.Override public int getTypeValue() { return type_; } /** * <pre> * The type of the generated tag snippets for tracking conversions. * </pre> * * <code>.google.ads.googleads.v19.enums.TrackingCodeTypeEnum.TrackingCodeType type = 1;</code> * @return The type. */ @java.lang.Override public com.google.ads.googleads.v19.enums.TrackingCodeTypeEnum.TrackingCodeType getType() { com.google.ads.googleads.v19.enums.TrackingCodeTypeEnum.TrackingCodeType result = com.google.ads.googleads.v19.enums.TrackingCodeTypeEnum.TrackingCodeType.forNumber(type_); return result == null ? com.google.ads.googleads.v19.enums.TrackingCodeTypeEnum.TrackingCodeType.UNRECOGNIZED : result; } public static final int PAGE_FORMAT_FIELD_NUMBER = 2; private int pageFormat_ = 0; /** * <pre> * The format of the web page where the tracking tag and snippet will be * installed, for example, HTML. * </pre> * * <code>.google.ads.googleads.v19.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat page_format = 2;</code> * @return The enum numeric value on the wire for pageFormat. */ @java.lang.Override public int getPageFormatValue() { return pageFormat_; } /** * <pre> * The format of the web page where the tracking tag and snippet will be * installed, for example, HTML. * </pre> * * <code>.google.ads.googleads.v19.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat page_format = 2;</code> * @return The pageFormat. */ @java.lang.Override public com.google.ads.googleads.v19.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat getPageFormat() { com.google.ads.googleads.v19.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat result = com.google.ads.googleads.v19.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat.forNumber(pageFormat_); return result == null ? 
com.google.ads.googleads.v19.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat.UNRECOGNIZED : result; } public static final int GLOBAL_SITE_TAG_FIELD_NUMBER = 5; @SuppressWarnings("serial") private volatile java.lang.Object globalSiteTag_ = ""; /** * <pre> * The site tag that adds visitors to your basic remarketing lists and sets * new cookies on your domain. * </pre> * * <code>optional string global_site_tag = 5;</code> * @return Whether the globalSiteTag field is set. */ @java.lang.Override public boolean hasGlobalSiteTag() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * The site tag that adds visitors to your basic remarketing lists and sets * new cookies on your domain. * </pre> * * <code>optional string global_site_tag = 5;</code> * @return The globalSiteTag. */ @java.lang.Override public java.lang.String getGlobalSiteTag() { java.lang.Object ref = globalSiteTag_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); globalSiteTag_ = s; return s; } } /** * <pre> * The site tag that adds visitors to your basic remarketing lists and sets * new cookies on your domain. * </pre> * * <code>optional string global_site_tag = 5;</code> * @return The bytes for globalSiteTag. */ @java.lang.Override public com.google.protobuf.ByteString getGlobalSiteTagBytes() { java.lang.Object ref = globalSiteTag_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); globalSiteTag_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int EVENT_SNIPPET_FIELD_NUMBER = 6; @SuppressWarnings("serial") private volatile java.lang.Object eventSnippet_ = ""; /** * <pre> * The event snippet that works with the site tag to track actions that * should be counted as conversions. 
* </pre> * * <code>optional string event_snippet = 6;</code> * @return Whether the eventSnippet field is set. */ @java.lang.Override public boolean hasEventSnippet() { return ((bitField0_ & 0x00000002) != 0); } /** * <pre> * The event snippet that works with the site tag to track actions that * should be counted as conversions. * </pre> * * <code>optional string event_snippet = 6;</code> * @return The eventSnippet. */ @java.lang.Override public java.lang.String getEventSnippet() { java.lang.Object ref = eventSnippet_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); eventSnippet_ = s; return s; } } /** * <pre> * The event snippet that works with the site tag to track actions that * should be counted as conversions. * </pre> * * <code>optional string event_snippet = 6;</code> * @return The bytes for eventSnippet. */ @java.lang.Override public com.google.protobuf.ByteString getEventSnippetBytes() { java.lang.Object ref = eventSnippet_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); eventSnippet_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (type_ != com.google.ads.googleads.v19.enums.TrackingCodeTypeEnum.TrackingCodeType.UNSPECIFIED.getNumber()) { output.writeEnum(1, type_); } if (pageFormat_ != com.google.ads.googleads.v19.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat.UNSPECIFIED.getNumber()) { output.writeEnum(2, 
pageFormat_); } if (((bitField0_ & 0x00000001) != 0)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, globalSiteTag_); } if (((bitField0_ & 0x00000002) != 0)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 6, eventSnippet_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (type_ != com.google.ads.googleads.v19.enums.TrackingCodeTypeEnum.TrackingCodeType.UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(1, type_); } if (pageFormat_ != com.google.ads.googleads.v19.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat.UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(2, pageFormat_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, globalSiteTag_); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, eventSnippet_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v19.common.TagSnippet)) { return super.equals(obj); } com.google.ads.googleads.v19.common.TagSnippet other = (com.google.ads.googleads.v19.common.TagSnippet) obj; if (type_ != other.type_) return false; if (pageFormat_ != other.pageFormat_) return false; if (hasGlobalSiteTag() != other.hasGlobalSiteTag()) return false; if (hasGlobalSiteTag()) { if (!getGlobalSiteTag() .equals(other.getGlobalSiteTag())) return false; } if (hasEventSnippet() != other.hasEventSnippet()) return false; if (hasEventSnippet()) { if (!getEventSnippet() .equals(other.getEventSnippet())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override 
public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + TYPE_FIELD_NUMBER; hash = (53 * hash) + type_; hash = (37 * hash) + PAGE_FORMAT_FIELD_NUMBER; hash = (53 * hash) + pageFormat_; if (hasGlobalSiteTag()) { hash = (37 * hash) + GLOBAL_SITE_TAG_FIELD_NUMBER; hash = (53 * hash) + getGlobalSiteTag().hashCode(); } if (hasEventSnippet()) { hash = (37 * hash) + EVENT_SNIPPET_FIELD_NUMBER; hash = (53 * hash) + getEventSnippet().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v19.common.TagSnippet parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.common.TagSnippet parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.common.TagSnippet parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.common.TagSnippet parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.common.TagSnippet parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.common.TagSnippet parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.common.TagSnippet parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.common.TagSnippet parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.common.TagSnippet parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.common.TagSnippet parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.common.TagSnippet parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.common.TagSnippet parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v19.common.TagSnippet prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { 
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * The site tag and event snippet pair for a TrackingCodeType. * </pre> * * Protobuf type {@code google.ads.googleads.v19.common.TagSnippet} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.common.TagSnippet) com.google.ads.googleads.v19.common.TagSnippetOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.common.TagSnippetProto.internal_static_google_ads_googleads_v19_common_TagSnippet_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.common.TagSnippetProto.internal_static_google_ads_googleads_v19_common_TagSnippet_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.common.TagSnippet.class, com.google.ads.googleads.v19.common.TagSnippet.Builder.class); } // Construct using com.google.ads.googleads.v19.common.TagSnippet.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; type_ = 0; pageFormat_ = 0; globalSiteTag_ = ""; eventSnippet_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v19.common.TagSnippetProto.internal_static_google_ads_googleads_v19_common_TagSnippet_descriptor; } @java.lang.Override public com.google.ads.googleads.v19.common.TagSnippet getDefaultInstanceForType() { return 
com.google.ads.googleads.v19.common.TagSnippet.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v19.common.TagSnippet build() { com.google.ads.googleads.v19.common.TagSnippet result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v19.common.TagSnippet buildPartial() { com.google.ads.googleads.v19.common.TagSnippet result = new com.google.ads.googleads.v19.common.TagSnippet(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.ads.googleads.v19.common.TagSnippet result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.type_ = type_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.pageFormat_ = pageFormat_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000004) != 0)) { result.globalSiteTag_ = globalSiteTag_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000008) != 0)) { result.eventSnippet_ = eventSnippet_; to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object 
value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v19.common.TagSnippet) { return mergeFrom((com.google.ads.googleads.v19.common.TagSnippet)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v19.common.TagSnippet other) { if (other == com.google.ads.googleads.v19.common.TagSnippet.getDefaultInstance()) return this; if (other.type_ != 0) { setTypeValue(other.getTypeValue()); } if (other.pageFormat_ != 0) { setPageFormatValue(other.getPageFormatValue()); } if (other.hasGlobalSiteTag()) { globalSiteTag_ = other.globalSiteTag_; bitField0_ |= 0x00000004; onChanged(); } if (other.hasEventSnippet()) { eventSnippet_ = other.eventSnippet_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { type_ = input.readEnum(); bitField0_ |= 0x00000001; break; } // case 8 case 16: { pageFormat_ = input.readEnum(); bitField0_ |= 0x00000002; break; } // case 16 case 42: { globalSiteTag_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 42 case 50: { eventSnippet_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 50 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private int type_ = 0; /** * <pre> * The type of the generated tag snippets for tracking conversions. * </pre> * * <code>.google.ads.googleads.v19.enums.TrackingCodeTypeEnum.TrackingCodeType type = 1;</code> * @return The enum numeric value on the wire for type. */ @java.lang.Override public int getTypeValue() { return type_; } /** * <pre> * The type of the generated tag snippets for tracking conversions. * </pre> * * <code>.google.ads.googleads.v19.enums.TrackingCodeTypeEnum.TrackingCodeType type = 1;</code> * @param value The enum numeric value on the wire for type to set. * @return This builder for chaining. */ public Builder setTypeValue(int value) { type_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * The type of the generated tag snippets for tracking conversions. * </pre> * * <code>.google.ads.googleads.v19.enums.TrackingCodeTypeEnum.TrackingCodeType type = 1;</code> * @return The type. */ @java.lang.Override public com.google.ads.googleads.v19.enums.TrackingCodeTypeEnum.TrackingCodeType getType() { com.google.ads.googleads.v19.enums.TrackingCodeTypeEnum.TrackingCodeType result = com.google.ads.googleads.v19.enums.TrackingCodeTypeEnum.TrackingCodeType.forNumber(type_); return result == null ? com.google.ads.googleads.v19.enums.TrackingCodeTypeEnum.TrackingCodeType.UNRECOGNIZED : result; } /** * <pre> * The type of the generated tag snippets for tracking conversions. * </pre> * * <code>.google.ads.googleads.v19.enums.TrackingCodeTypeEnum.TrackingCodeType type = 1;</code> * @param value The type to set. * @return This builder for chaining. 
*/ public Builder setType(com.google.ads.googleads.v19.enums.TrackingCodeTypeEnum.TrackingCodeType value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; type_ = value.getNumber(); onChanged(); return this; } /** * <pre> * The type of the generated tag snippets for tracking conversions. * </pre> * * <code>.google.ads.googleads.v19.enums.TrackingCodeTypeEnum.TrackingCodeType type = 1;</code> * @return This builder for chaining. */ public Builder clearType() { bitField0_ = (bitField0_ & ~0x00000001); type_ = 0; onChanged(); return this; } private int pageFormat_ = 0; /** * <pre> * The format of the web page where the tracking tag and snippet will be * installed, for example, HTML. * </pre> * * <code>.google.ads.googleads.v19.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat page_format = 2;</code> * @return The enum numeric value on the wire for pageFormat. */ @java.lang.Override public int getPageFormatValue() { return pageFormat_; } /** * <pre> * The format of the web page where the tracking tag and snippet will be * installed, for example, HTML. * </pre> * * <code>.google.ads.googleads.v19.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat page_format = 2;</code> * @param value The enum numeric value on the wire for pageFormat to set. * @return This builder for chaining. */ public Builder setPageFormatValue(int value) { pageFormat_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * <pre> * The format of the web page where the tracking tag and snippet will be * installed, for example, HTML. * </pre> * * <code>.google.ads.googleads.v19.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat page_format = 2;</code> * @return The pageFormat. 
*/ @java.lang.Override public com.google.ads.googleads.v19.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat getPageFormat() { com.google.ads.googleads.v19.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat result = com.google.ads.googleads.v19.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat.forNumber(pageFormat_); return result == null ? com.google.ads.googleads.v19.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat.UNRECOGNIZED : result; } /** * <pre> * The format of the web page where the tracking tag and snippet will be * installed, for example, HTML. * </pre> * * <code>.google.ads.googleads.v19.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat page_format = 2;</code> * @param value The pageFormat to set. * @return This builder for chaining. */ public Builder setPageFormat(com.google.ads.googleads.v19.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; pageFormat_ = value.getNumber(); onChanged(); return this; } /** * <pre> * The format of the web page where the tracking tag and snippet will be * installed, for example, HTML. * </pre> * * <code>.google.ads.googleads.v19.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat page_format = 2;</code> * @return This builder for chaining. */ public Builder clearPageFormat() { bitField0_ = (bitField0_ & ~0x00000002); pageFormat_ = 0; onChanged(); return this; } private java.lang.Object globalSiteTag_ = ""; /** * <pre> * The site tag that adds visitors to your basic remarketing lists and sets * new cookies on your domain. * </pre> * * <code>optional string global_site_tag = 5;</code> * @return Whether the globalSiteTag field is set. */ public boolean hasGlobalSiteTag() { return ((bitField0_ & 0x00000004) != 0); } /** * <pre> * The site tag that adds visitors to your basic remarketing lists and sets * new cookies on your domain. 
* </pre> * * <code>optional string global_site_tag = 5;</code> * @return The globalSiteTag. */ public java.lang.String getGlobalSiteTag() { java.lang.Object ref = globalSiteTag_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); globalSiteTag_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * The site tag that adds visitors to your basic remarketing lists and sets * new cookies on your domain. * </pre> * * <code>optional string global_site_tag = 5;</code> * @return The bytes for globalSiteTag. */ public com.google.protobuf.ByteString getGlobalSiteTagBytes() { java.lang.Object ref = globalSiteTag_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); globalSiteTag_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * The site tag that adds visitors to your basic remarketing lists and sets * new cookies on your domain. * </pre> * * <code>optional string global_site_tag = 5;</code> * @param value The globalSiteTag to set. * @return This builder for chaining. */ public Builder setGlobalSiteTag( java.lang.String value) { if (value == null) { throw new NullPointerException(); } globalSiteTag_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * <pre> * The site tag that adds visitors to your basic remarketing lists and sets * new cookies on your domain. * </pre> * * <code>optional string global_site_tag = 5;</code> * @return This builder for chaining. */ public Builder clearGlobalSiteTag() { globalSiteTag_ = getDefaultInstance().getGlobalSiteTag(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * <pre> * The site tag that adds visitors to your basic remarketing lists and sets * new cookies on your domain. 
* </pre> * * <code>optional string global_site_tag = 5;</code> * @param value The bytes for globalSiteTag to set. * @return This builder for chaining. */ public Builder setGlobalSiteTagBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); globalSiteTag_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private java.lang.Object eventSnippet_ = ""; /** * <pre> * The event snippet that works with the site tag to track actions that * should be counted as conversions. * </pre> * * <code>optional string event_snippet = 6;</code> * @return Whether the eventSnippet field is set. */ public boolean hasEventSnippet() { return ((bitField0_ & 0x00000008) != 0); } /** * <pre> * The event snippet that works with the site tag to track actions that * should be counted as conversions. * </pre> * * <code>optional string event_snippet = 6;</code> * @return The eventSnippet. */ public java.lang.String getEventSnippet() { java.lang.Object ref = eventSnippet_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); eventSnippet_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * The event snippet that works with the site tag to track actions that * should be counted as conversions. * </pre> * * <code>optional string event_snippet = 6;</code> * @return The bytes for eventSnippet. */ public com.google.protobuf.ByteString getEventSnippetBytes() { java.lang.Object ref = eventSnippet_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); eventSnippet_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * The event snippet that works with the site tag to track actions that * should be counted as conversions. 
* </pre> * * <code>optional string event_snippet = 6;</code> * @param value The eventSnippet to set. * @return This builder for chaining. */ public Builder setEventSnippet( java.lang.String value) { if (value == null) { throw new NullPointerException(); } eventSnippet_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * <pre> * The event snippet that works with the site tag to track actions that * should be counted as conversions. * </pre> * * <code>optional string event_snippet = 6;</code> * @return This builder for chaining. */ public Builder clearEventSnippet() { eventSnippet_ = getDefaultInstance().getEventSnippet(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * <pre> * The event snippet that works with the site tag to track actions that * should be counted as conversions. * </pre> * * <code>optional string event_snippet = 6;</code> * @param value The bytes for eventSnippet to set. * @return This builder for chaining. */ public Builder setEventSnippetBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); eventSnippet_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.common.TagSnippet) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v19.common.TagSnippet) private static final com.google.ads.googleads.v19.common.TagSnippet DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v19.common.TagSnippet(); } public static com.google.ads.googleads.v19.common.TagSnippet getDefaultInstance() { return 
DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<TagSnippet> PARSER = new com.google.protobuf.AbstractParser<TagSnippet>() { @java.lang.Override public TagSnippet parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<TagSnippet> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<TagSnippet> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v19.common.TagSnippet getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
37,239
google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/common/TagSnippet.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v20/common/tag_snippet.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v20.common; /** * <pre> * The site tag and event snippet pair for a TrackingCodeType. * </pre> * * Protobuf type {@code google.ads.googleads.v20.common.TagSnippet} */ public final class TagSnippet extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v20.common.TagSnippet) TagSnippetOrBuilder { private static final long serialVersionUID = 0L; // Use TagSnippet.newBuilder() to construct. private TagSnippet(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private TagSnippet() { type_ = 0; pageFormat_ = 0; globalSiteTag_ = ""; eventSnippet_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new TagSnippet(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.common.TagSnippetProto.internal_static_google_ads_googleads_v20_common_TagSnippet_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.common.TagSnippetProto.internal_static_google_ads_googleads_v20_common_TagSnippet_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.common.TagSnippet.class, com.google.ads.googleads.v20.common.TagSnippet.Builder.class); } private int bitField0_; public static final int TYPE_FIELD_NUMBER = 1; private int type_ = 0; /** * <pre> * The type of the generated tag snippets for tracking conversions. * </pre> * * <code>.google.ads.googleads.v20.enums.TrackingCodeTypeEnum.TrackingCodeType type = 1;</code> * @return The enum numeric value on the wire for type. 
*/ @java.lang.Override public int getTypeValue() { return type_; } /** * <pre> * The type of the generated tag snippets for tracking conversions. * </pre> * * <code>.google.ads.googleads.v20.enums.TrackingCodeTypeEnum.TrackingCodeType type = 1;</code> * @return The type. */ @java.lang.Override public com.google.ads.googleads.v20.enums.TrackingCodeTypeEnum.TrackingCodeType getType() { com.google.ads.googleads.v20.enums.TrackingCodeTypeEnum.TrackingCodeType result = com.google.ads.googleads.v20.enums.TrackingCodeTypeEnum.TrackingCodeType.forNumber(type_); return result == null ? com.google.ads.googleads.v20.enums.TrackingCodeTypeEnum.TrackingCodeType.UNRECOGNIZED : result; } public static final int PAGE_FORMAT_FIELD_NUMBER = 2; private int pageFormat_ = 0; /** * <pre> * The format of the web page where the tracking tag and snippet will be * installed, for example, HTML. * </pre> * * <code>.google.ads.googleads.v20.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat page_format = 2;</code> * @return The enum numeric value on the wire for pageFormat. */ @java.lang.Override public int getPageFormatValue() { return pageFormat_; } /** * <pre> * The format of the web page where the tracking tag and snippet will be * installed, for example, HTML. * </pre> * * <code>.google.ads.googleads.v20.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat page_format = 2;</code> * @return The pageFormat. */ @java.lang.Override public com.google.ads.googleads.v20.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat getPageFormat() { com.google.ads.googleads.v20.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat result = com.google.ads.googleads.v20.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat.forNumber(pageFormat_); return result == null ? 
com.google.ads.googleads.v20.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat.UNRECOGNIZED : result; } public static final int GLOBAL_SITE_TAG_FIELD_NUMBER = 5; @SuppressWarnings("serial") private volatile java.lang.Object globalSiteTag_ = ""; /** * <pre> * The site tag that adds visitors to your basic remarketing lists and sets * new cookies on your domain. * </pre> * * <code>optional string global_site_tag = 5;</code> * @return Whether the globalSiteTag field is set. */ @java.lang.Override public boolean hasGlobalSiteTag() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * The site tag that adds visitors to your basic remarketing lists and sets * new cookies on your domain. * </pre> * * <code>optional string global_site_tag = 5;</code> * @return The globalSiteTag. */ @java.lang.Override public java.lang.String getGlobalSiteTag() { java.lang.Object ref = globalSiteTag_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); globalSiteTag_ = s; return s; } } /** * <pre> * The site tag that adds visitors to your basic remarketing lists and sets * new cookies on your domain. * </pre> * * <code>optional string global_site_tag = 5;</code> * @return The bytes for globalSiteTag. */ @java.lang.Override public com.google.protobuf.ByteString getGlobalSiteTagBytes() { java.lang.Object ref = globalSiteTag_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); globalSiteTag_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int EVENT_SNIPPET_FIELD_NUMBER = 6; @SuppressWarnings("serial") private volatile java.lang.Object eventSnippet_ = ""; /** * <pre> * The event snippet that works with the site tag to track actions that * should be counted as conversions. 
* </pre> * * <code>optional string event_snippet = 6;</code> * @return Whether the eventSnippet field is set. */ @java.lang.Override public boolean hasEventSnippet() { return ((bitField0_ & 0x00000002) != 0); } /** * <pre> * The event snippet that works with the site tag to track actions that * should be counted as conversions. * </pre> * * <code>optional string event_snippet = 6;</code> * @return The eventSnippet. */ @java.lang.Override public java.lang.String getEventSnippet() { java.lang.Object ref = eventSnippet_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); eventSnippet_ = s; return s; } } /** * <pre> * The event snippet that works with the site tag to track actions that * should be counted as conversions. * </pre> * * <code>optional string event_snippet = 6;</code> * @return The bytes for eventSnippet. */ @java.lang.Override public com.google.protobuf.ByteString getEventSnippetBytes() { java.lang.Object ref = eventSnippet_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); eventSnippet_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (type_ != com.google.ads.googleads.v20.enums.TrackingCodeTypeEnum.TrackingCodeType.UNSPECIFIED.getNumber()) { output.writeEnum(1, type_); } if (pageFormat_ != com.google.ads.googleads.v20.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat.UNSPECIFIED.getNumber()) { output.writeEnum(2, 
pageFormat_); } if (((bitField0_ & 0x00000001) != 0)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, globalSiteTag_); } if (((bitField0_ & 0x00000002) != 0)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 6, eventSnippet_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (type_ != com.google.ads.googleads.v20.enums.TrackingCodeTypeEnum.TrackingCodeType.UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(1, type_); } if (pageFormat_ != com.google.ads.googleads.v20.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat.UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(2, pageFormat_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, globalSiteTag_); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, eventSnippet_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v20.common.TagSnippet)) { return super.equals(obj); } com.google.ads.googleads.v20.common.TagSnippet other = (com.google.ads.googleads.v20.common.TagSnippet) obj; if (type_ != other.type_) return false; if (pageFormat_ != other.pageFormat_) return false; if (hasGlobalSiteTag() != other.hasGlobalSiteTag()) return false; if (hasGlobalSiteTag()) { if (!getGlobalSiteTag() .equals(other.getGlobalSiteTag())) return false; } if (hasEventSnippet() != other.hasEventSnippet()) return false; if (hasEventSnippet()) { if (!getEventSnippet() .equals(other.getEventSnippet())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override 
public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + TYPE_FIELD_NUMBER; hash = (53 * hash) + type_; hash = (37 * hash) + PAGE_FORMAT_FIELD_NUMBER; hash = (53 * hash) + pageFormat_; if (hasGlobalSiteTag()) { hash = (37 * hash) + GLOBAL_SITE_TAG_FIELD_NUMBER; hash = (53 * hash) + getGlobalSiteTag().hashCode(); } if (hasEventSnippet()) { hash = (37 * hash) + EVENT_SNIPPET_FIELD_NUMBER; hash = (53 * hash) + getEventSnippet().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v20.common.TagSnippet parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.common.TagSnippet parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.common.TagSnippet parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.common.TagSnippet parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.common.TagSnippet parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.common.TagSnippet parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.common.TagSnippet parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.common.TagSnippet parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v20.common.TagSnippet parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.common.TagSnippet parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v20.common.TagSnippet parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.common.TagSnippet parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v20.common.TagSnippet prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { 
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * The site tag and event snippet pair for a TrackingCodeType. * </pre> * * Protobuf type {@code google.ads.googleads.v20.common.TagSnippet} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.common.TagSnippet) com.google.ads.googleads.v20.common.TagSnippetOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.common.TagSnippetProto.internal_static_google_ads_googleads_v20_common_TagSnippet_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.common.TagSnippetProto.internal_static_google_ads_googleads_v20_common_TagSnippet_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.common.TagSnippet.class, com.google.ads.googleads.v20.common.TagSnippet.Builder.class); } // Construct using com.google.ads.googleads.v20.common.TagSnippet.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; type_ = 0; pageFormat_ = 0; globalSiteTag_ = ""; eventSnippet_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v20.common.TagSnippetProto.internal_static_google_ads_googleads_v20_common_TagSnippet_descriptor; } @java.lang.Override public com.google.ads.googleads.v20.common.TagSnippet getDefaultInstanceForType() { return 
com.google.ads.googleads.v20.common.TagSnippet.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v20.common.TagSnippet build() { com.google.ads.googleads.v20.common.TagSnippet result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v20.common.TagSnippet buildPartial() { com.google.ads.googleads.v20.common.TagSnippet result = new com.google.ads.googleads.v20.common.TagSnippet(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.ads.googleads.v20.common.TagSnippet result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.type_ = type_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.pageFormat_ = pageFormat_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000004) != 0)) { result.globalSiteTag_ = globalSiteTag_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000008) != 0)) { result.eventSnippet_ = eventSnippet_; to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object 
value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v20.common.TagSnippet) { return mergeFrom((com.google.ads.googleads.v20.common.TagSnippet)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v20.common.TagSnippet other) { if (other == com.google.ads.googleads.v20.common.TagSnippet.getDefaultInstance()) return this; if (other.type_ != 0) { setTypeValue(other.getTypeValue()); } if (other.pageFormat_ != 0) { setPageFormatValue(other.getPageFormatValue()); } if (other.hasGlobalSiteTag()) { globalSiteTag_ = other.globalSiteTag_; bitField0_ |= 0x00000004; onChanged(); } if (other.hasEventSnippet()) { eventSnippet_ = other.eventSnippet_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { type_ = input.readEnum(); bitField0_ |= 0x00000001; break; } // case 8 case 16: { pageFormat_ = input.readEnum(); bitField0_ |= 0x00000002; break; } // case 16 case 42: { globalSiteTag_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 42 case 50: { eventSnippet_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 50 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private int type_ = 0; /** * <pre> * The type of the generated tag snippets for tracking conversions. * </pre> * * <code>.google.ads.googleads.v20.enums.TrackingCodeTypeEnum.TrackingCodeType type = 1;</code> * @return The enum numeric value on the wire for type. */ @java.lang.Override public int getTypeValue() { return type_; } /** * <pre> * The type of the generated tag snippets for tracking conversions. * </pre> * * <code>.google.ads.googleads.v20.enums.TrackingCodeTypeEnum.TrackingCodeType type = 1;</code> * @param value The enum numeric value on the wire for type to set. * @return This builder for chaining. */ public Builder setTypeValue(int value) { type_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * The type of the generated tag snippets for tracking conversions. * </pre> * * <code>.google.ads.googleads.v20.enums.TrackingCodeTypeEnum.TrackingCodeType type = 1;</code> * @return The type. */ @java.lang.Override public com.google.ads.googleads.v20.enums.TrackingCodeTypeEnum.TrackingCodeType getType() { com.google.ads.googleads.v20.enums.TrackingCodeTypeEnum.TrackingCodeType result = com.google.ads.googleads.v20.enums.TrackingCodeTypeEnum.TrackingCodeType.forNumber(type_); return result == null ? com.google.ads.googleads.v20.enums.TrackingCodeTypeEnum.TrackingCodeType.UNRECOGNIZED : result; } /** * <pre> * The type of the generated tag snippets for tracking conversions. * </pre> * * <code>.google.ads.googleads.v20.enums.TrackingCodeTypeEnum.TrackingCodeType type = 1;</code> * @param value The type to set. * @return This builder for chaining. 
*/ public Builder setType(com.google.ads.googleads.v20.enums.TrackingCodeTypeEnum.TrackingCodeType value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; type_ = value.getNumber(); onChanged(); return this; } /** * <pre> * The type of the generated tag snippets for tracking conversions. * </pre> * * <code>.google.ads.googleads.v20.enums.TrackingCodeTypeEnum.TrackingCodeType type = 1;</code> * @return This builder for chaining. */ public Builder clearType() { bitField0_ = (bitField0_ & ~0x00000001); type_ = 0; onChanged(); return this; } private int pageFormat_ = 0; /** * <pre> * The format of the web page where the tracking tag and snippet will be * installed, for example, HTML. * </pre> * * <code>.google.ads.googleads.v20.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat page_format = 2;</code> * @return The enum numeric value on the wire for pageFormat. */ @java.lang.Override public int getPageFormatValue() { return pageFormat_; } /** * <pre> * The format of the web page where the tracking tag and snippet will be * installed, for example, HTML. * </pre> * * <code>.google.ads.googleads.v20.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat page_format = 2;</code> * @param value The enum numeric value on the wire for pageFormat to set. * @return This builder for chaining. */ public Builder setPageFormatValue(int value) { pageFormat_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * <pre> * The format of the web page where the tracking tag and snippet will be * installed, for example, HTML. * </pre> * * <code>.google.ads.googleads.v20.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat page_format = 2;</code> * @return The pageFormat. 
*/ @java.lang.Override public com.google.ads.googleads.v20.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat getPageFormat() { com.google.ads.googleads.v20.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat result = com.google.ads.googleads.v20.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat.forNumber(pageFormat_); return result == null ? com.google.ads.googleads.v20.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat.UNRECOGNIZED : result; } /** * <pre> * The format of the web page where the tracking tag and snippet will be * installed, for example, HTML. * </pre> * * <code>.google.ads.googleads.v20.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat page_format = 2;</code> * @param value The pageFormat to set. * @return This builder for chaining. */ public Builder setPageFormat(com.google.ads.googleads.v20.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; pageFormat_ = value.getNumber(); onChanged(); return this; } /** * <pre> * The format of the web page where the tracking tag and snippet will be * installed, for example, HTML. * </pre> * * <code>.google.ads.googleads.v20.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat page_format = 2;</code> * @return This builder for chaining. */ public Builder clearPageFormat() { bitField0_ = (bitField0_ & ~0x00000002); pageFormat_ = 0; onChanged(); return this; } private java.lang.Object globalSiteTag_ = ""; /** * <pre> * The site tag that adds visitors to your basic remarketing lists and sets * new cookies on your domain. * </pre> * * <code>optional string global_site_tag = 5;</code> * @return Whether the globalSiteTag field is set. */ public boolean hasGlobalSiteTag() { return ((bitField0_ & 0x00000004) != 0); } /** * <pre> * The site tag that adds visitors to your basic remarketing lists and sets * new cookies on your domain. 
* </pre> * * <code>optional string global_site_tag = 5;</code> * @return The globalSiteTag. */ public java.lang.String getGlobalSiteTag() { java.lang.Object ref = globalSiteTag_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); globalSiteTag_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * The site tag that adds visitors to your basic remarketing lists and sets * new cookies on your domain. * </pre> * * <code>optional string global_site_tag = 5;</code> * @return The bytes for globalSiteTag. */ public com.google.protobuf.ByteString getGlobalSiteTagBytes() { java.lang.Object ref = globalSiteTag_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); globalSiteTag_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * The site tag that adds visitors to your basic remarketing lists and sets * new cookies on your domain. * </pre> * * <code>optional string global_site_tag = 5;</code> * @param value The globalSiteTag to set. * @return This builder for chaining. */ public Builder setGlobalSiteTag( java.lang.String value) { if (value == null) { throw new NullPointerException(); } globalSiteTag_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * <pre> * The site tag that adds visitors to your basic remarketing lists and sets * new cookies on your domain. * </pre> * * <code>optional string global_site_tag = 5;</code> * @return This builder for chaining. */ public Builder clearGlobalSiteTag() { globalSiteTag_ = getDefaultInstance().getGlobalSiteTag(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * <pre> * The site tag that adds visitors to your basic remarketing lists and sets * new cookies on your domain. 
* </pre> * * <code>optional string global_site_tag = 5;</code> * @param value The bytes for globalSiteTag to set. * @return This builder for chaining. */ public Builder setGlobalSiteTagBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); globalSiteTag_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private java.lang.Object eventSnippet_ = ""; /** * <pre> * The event snippet that works with the site tag to track actions that * should be counted as conversions. * </pre> * * <code>optional string event_snippet = 6;</code> * @return Whether the eventSnippet field is set. */ public boolean hasEventSnippet() { return ((bitField0_ & 0x00000008) != 0); } /** * <pre> * The event snippet that works with the site tag to track actions that * should be counted as conversions. * </pre> * * <code>optional string event_snippet = 6;</code> * @return The eventSnippet. */ public java.lang.String getEventSnippet() { java.lang.Object ref = eventSnippet_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); eventSnippet_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * The event snippet that works with the site tag to track actions that * should be counted as conversions. * </pre> * * <code>optional string event_snippet = 6;</code> * @return The bytes for eventSnippet. */ public com.google.protobuf.ByteString getEventSnippetBytes() { java.lang.Object ref = eventSnippet_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); eventSnippet_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * The event snippet that works with the site tag to track actions that * should be counted as conversions. 
* </pre> * * <code>optional string event_snippet = 6;</code> * @param value The eventSnippet to set. * @return This builder for chaining. */ public Builder setEventSnippet( java.lang.String value) { if (value == null) { throw new NullPointerException(); } eventSnippet_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * <pre> * The event snippet that works with the site tag to track actions that * should be counted as conversions. * </pre> * * <code>optional string event_snippet = 6;</code> * @return This builder for chaining. */ public Builder clearEventSnippet() { eventSnippet_ = getDefaultInstance().getEventSnippet(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * <pre> * The event snippet that works with the site tag to track actions that * should be counted as conversions. * </pre> * * <code>optional string event_snippet = 6;</code> * @param value The bytes for eventSnippet to set. * @return This builder for chaining. */ public Builder setEventSnippetBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); eventSnippet_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.common.TagSnippet) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v20.common.TagSnippet) private static final com.google.ads.googleads.v20.common.TagSnippet DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v20.common.TagSnippet(); } public static com.google.ads.googleads.v20.common.TagSnippet getDefaultInstance() { return 
DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<TagSnippet> PARSER = new com.google.protobuf.AbstractParser<TagSnippet>() { @java.lang.Override public TagSnippet parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<TagSnippet> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<TagSnippet> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v20.common.TagSnippet getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
37,239
google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/common/TagSnippet.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v21/common/tag_snippet.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v21.common; /** * <pre> * The site tag and event snippet pair for a TrackingCodeType. * </pre> * * Protobuf type {@code google.ads.googleads.v21.common.TagSnippet} */ public final class TagSnippet extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v21.common.TagSnippet) TagSnippetOrBuilder { private static final long serialVersionUID = 0L; // Use TagSnippet.newBuilder() to construct. private TagSnippet(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private TagSnippet() { type_ = 0; pageFormat_ = 0; globalSiteTag_ = ""; eventSnippet_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new TagSnippet(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.common.TagSnippetProto.internal_static_google_ads_googleads_v21_common_TagSnippet_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.common.TagSnippetProto.internal_static_google_ads_googleads_v21_common_TagSnippet_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v21.common.TagSnippet.class, com.google.ads.googleads.v21.common.TagSnippet.Builder.class); } private int bitField0_; public static final int TYPE_FIELD_NUMBER = 1; private int type_ = 0; /** * <pre> * The type of the generated tag snippets for tracking conversions. * </pre> * * <code>.google.ads.googleads.v21.enums.TrackingCodeTypeEnum.TrackingCodeType type = 1;</code> * @return The enum numeric value on the wire for type. 
*/ @java.lang.Override public int getTypeValue() { return type_; } /** * <pre> * The type of the generated tag snippets for tracking conversions. * </pre> * * <code>.google.ads.googleads.v21.enums.TrackingCodeTypeEnum.TrackingCodeType type = 1;</code> * @return The type. */ @java.lang.Override public com.google.ads.googleads.v21.enums.TrackingCodeTypeEnum.TrackingCodeType getType() { com.google.ads.googleads.v21.enums.TrackingCodeTypeEnum.TrackingCodeType result = com.google.ads.googleads.v21.enums.TrackingCodeTypeEnum.TrackingCodeType.forNumber(type_); return result == null ? com.google.ads.googleads.v21.enums.TrackingCodeTypeEnum.TrackingCodeType.UNRECOGNIZED : result; } public static final int PAGE_FORMAT_FIELD_NUMBER = 2; private int pageFormat_ = 0; /** * <pre> * The format of the web page where the tracking tag and snippet will be * installed, for example, HTML. * </pre> * * <code>.google.ads.googleads.v21.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat page_format = 2;</code> * @return The enum numeric value on the wire for pageFormat. */ @java.lang.Override public int getPageFormatValue() { return pageFormat_; } /** * <pre> * The format of the web page where the tracking tag and snippet will be * installed, for example, HTML. * </pre> * * <code>.google.ads.googleads.v21.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat page_format = 2;</code> * @return The pageFormat. */ @java.lang.Override public com.google.ads.googleads.v21.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat getPageFormat() { com.google.ads.googleads.v21.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat result = com.google.ads.googleads.v21.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat.forNumber(pageFormat_); return result == null ? 
com.google.ads.googleads.v21.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat.UNRECOGNIZED : result; } public static final int GLOBAL_SITE_TAG_FIELD_NUMBER = 5; @SuppressWarnings("serial") private volatile java.lang.Object globalSiteTag_ = ""; /** * <pre> * The site tag that adds visitors to your basic remarketing lists and sets * new cookies on your domain. * </pre> * * <code>optional string global_site_tag = 5;</code> * @return Whether the globalSiteTag field is set. */ @java.lang.Override public boolean hasGlobalSiteTag() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * The site tag that adds visitors to your basic remarketing lists and sets * new cookies on your domain. * </pre> * * <code>optional string global_site_tag = 5;</code> * @return The globalSiteTag. */ @java.lang.Override public java.lang.String getGlobalSiteTag() { java.lang.Object ref = globalSiteTag_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); globalSiteTag_ = s; return s; } } /** * <pre> * The site tag that adds visitors to your basic remarketing lists and sets * new cookies on your domain. * </pre> * * <code>optional string global_site_tag = 5;</code> * @return The bytes for globalSiteTag. */ @java.lang.Override public com.google.protobuf.ByteString getGlobalSiteTagBytes() { java.lang.Object ref = globalSiteTag_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); globalSiteTag_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int EVENT_SNIPPET_FIELD_NUMBER = 6; @SuppressWarnings("serial") private volatile java.lang.Object eventSnippet_ = ""; /** * <pre> * The event snippet that works with the site tag to track actions that * should be counted as conversions. 
* </pre> * * <code>optional string event_snippet = 6;</code> * @return Whether the eventSnippet field is set. */ @java.lang.Override public boolean hasEventSnippet() { return ((bitField0_ & 0x00000002) != 0); } /** * <pre> * The event snippet that works with the site tag to track actions that * should be counted as conversions. * </pre> * * <code>optional string event_snippet = 6;</code> * @return The eventSnippet. */ @java.lang.Override public java.lang.String getEventSnippet() { java.lang.Object ref = eventSnippet_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); eventSnippet_ = s; return s; } } /** * <pre> * The event snippet that works with the site tag to track actions that * should be counted as conversions. * </pre> * * <code>optional string event_snippet = 6;</code> * @return The bytes for eventSnippet. */ @java.lang.Override public com.google.protobuf.ByteString getEventSnippetBytes() { java.lang.Object ref = eventSnippet_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); eventSnippet_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (type_ != com.google.ads.googleads.v21.enums.TrackingCodeTypeEnum.TrackingCodeType.UNSPECIFIED.getNumber()) { output.writeEnum(1, type_); } if (pageFormat_ != com.google.ads.googleads.v21.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat.UNSPECIFIED.getNumber()) { output.writeEnum(2, 
pageFormat_); } if (((bitField0_ & 0x00000001) != 0)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, globalSiteTag_); } if (((bitField0_ & 0x00000002) != 0)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 6, eventSnippet_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (type_ != com.google.ads.googleads.v21.enums.TrackingCodeTypeEnum.TrackingCodeType.UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(1, type_); } if (pageFormat_ != com.google.ads.googleads.v21.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat.UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(2, pageFormat_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, globalSiteTag_); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, eventSnippet_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v21.common.TagSnippet)) { return super.equals(obj); } com.google.ads.googleads.v21.common.TagSnippet other = (com.google.ads.googleads.v21.common.TagSnippet) obj; if (type_ != other.type_) return false; if (pageFormat_ != other.pageFormat_) return false; if (hasGlobalSiteTag() != other.hasGlobalSiteTag()) return false; if (hasGlobalSiteTag()) { if (!getGlobalSiteTag() .equals(other.getGlobalSiteTag())) return false; } if (hasEventSnippet() != other.hasEventSnippet()) return false; if (hasEventSnippet()) { if (!getEventSnippet() .equals(other.getEventSnippet())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override 
public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + TYPE_FIELD_NUMBER; hash = (53 * hash) + type_; hash = (37 * hash) + PAGE_FORMAT_FIELD_NUMBER; hash = (53 * hash) + pageFormat_; if (hasGlobalSiteTag()) { hash = (37 * hash) + GLOBAL_SITE_TAG_FIELD_NUMBER; hash = (53 * hash) + getGlobalSiteTag().hashCode(); } if (hasEventSnippet()) { hash = (37 * hash) + EVENT_SNIPPET_FIELD_NUMBER; hash = (53 * hash) + getEventSnippet().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v21.common.TagSnippet parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.common.TagSnippet parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.common.TagSnippet parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.common.TagSnippet parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.common.TagSnippet parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.common.TagSnippet parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.common.TagSnippet parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.common.TagSnippet parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v21.common.TagSnippet parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.common.TagSnippet parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v21.common.TagSnippet parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.common.TagSnippet parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v21.common.TagSnippet prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { 
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * The site tag and event snippet pair for a TrackingCodeType. * </pre> * * Protobuf type {@code google.ads.googleads.v21.common.TagSnippet} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.common.TagSnippet) com.google.ads.googleads.v21.common.TagSnippetOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.common.TagSnippetProto.internal_static_google_ads_googleads_v21_common_TagSnippet_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.common.TagSnippetProto.internal_static_google_ads_googleads_v21_common_TagSnippet_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v21.common.TagSnippet.class, com.google.ads.googleads.v21.common.TagSnippet.Builder.class); } // Construct using com.google.ads.googleads.v21.common.TagSnippet.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; type_ = 0; pageFormat_ = 0; globalSiteTag_ = ""; eventSnippet_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v21.common.TagSnippetProto.internal_static_google_ads_googleads_v21_common_TagSnippet_descriptor; } @java.lang.Override public com.google.ads.googleads.v21.common.TagSnippet getDefaultInstanceForType() { return 
com.google.ads.googleads.v21.common.TagSnippet.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v21.common.TagSnippet build() { com.google.ads.googleads.v21.common.TagSnippet result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v21.common.TagSnippet buildPartial() { com.google.ads.googleads.v21.common.TagSnippet result = new com.google.ads.googleads.v21.common.TagSnippet(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.ads.googleads.v21.common.TagSnippet result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.type_ = type_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.pageFormat_ = pageFormat_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000004) != 0)) { result.globalSiteTag_ = globalSiteTag_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000008) != 0)) { result.eventSnippet_ = eventSnippet_; to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object 
value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v21.common.TagSnippet) { return mergeFrom((com.google.ads.googleads.v21.common.TagSnippet)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v21.common.TagSnippet other) { if (other == com.google.ads.googleads.v21.common.TagSnippet.getDefaultInstance()) return this; if (other.type_ != 0) { setTypeValue(other.getTypeValue()); } if (other.pageFormat_ != 0) { setPageFormatValue(other.getPageFormatValue()); } if (other.hasGlobalSiteTag()) { globalSiteTag_ = other.globalSiteTag_; bitField0_ |= 0x00000004; onChanged(); } if (other.hasEventSnippet()) { eventSnippet_ = other.eventSnippet_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { type_ = input.readEnum(); bitField0_ |= 0x00000001; break; } // case 8 case 16: { pageFormat_ = input.readEnum(); bitField0_ |= 0x00000002; break; } // case 16 case 42: { globalSiteTag_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 42 case 50: { eventSnippet_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 50 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private int type_ = 0; /** * <pre> * The type of the generated tag snippets for tracking conversions. * </pre> * * <code>.google.ads.googleads.v21.enums.TrackingCodeTypeEnum.TrackingCodeType type = 1;</code> * @return The enum numeric value on the wire for type. */ @java.lang.Override public int getTypeValue() { return type_; } /** * <pre> * The type of the generated tag snippets for tracking conversions. * </pre> * * <code>.google.ads.googleads.v21.enums.TrackingCodeTypeEnum.TrackingCodeType type = 1;</code> * @param value The enum numeric value on the wire for type to set. * @return This builder for chaining. */ public Builder setTypeValue(int value) { type_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * The type of the generated tag snippets for tracking conversions. * </pre> * * <code>.google.ads.googleads.v21.enums.TrackingCodeTypeEnum.TrackingCodeType type = 1;</code> * @return The type. */ @java.lang.Override public com.google.ads.googleads.v21.enums.TrackingCodeTypeEnum.TrackingCodeType getType() { com.google.ads.googleads.v21.enums.TrackingCodeTypeEnum.TrackingCodeType result = com.google.ads.googleads.v21.enums.TrackingCodeTypeEnum.TrackingCodeType.forNumber(type_); return result == null ? com.google.ads.googleads.v21.enums.TrackingCodeTypeEnum.TrackingCodeType.UNRECOGNIZED : result; } /** * <pre> * The type of the generated tag snippets for tracking conversions. * </pre> * * <code>.google.ads.googleads.v21.enums.TrackingCodeTypeEnum.TrackingCodeType type = 1;</code> * @param value The type to set. * @return This builder for chaining. 
*/ public Builder setType(com.google.ads.googleads.v21.enums.TrackingCodeTypeEnum.TrackingCodeType value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; type_ = value.getNumber(); onChanged(); return this; } /** * <pre> * The type of the generated tag snippets for tracking conversions. * </pre> * * <code>.google.ads.googleads.v21.enums.TrackingCodeTypeEnum.TrackingCodeType type = 1;</code> * @return This builder for chaining. */ public Builder clearType() { bitField0_ = (bitField0_ & ~0x00000001); type_ = 0; onChanged(); return this; } private int pageFormat_ = 0; /** * <pre> * The format of the web page where the tracking tag and snippet will be * installed, for example, HTML. * </pre> * * <code>.google.ads.googleads.v21.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat page_format = 2;</code> * @return The enum numeric value on the wire for pageFormat. */ @java.lang.Override public int getPageFormatValue() { return pageFormat_; } /** * <pre> * The format of the web page where the tracking tag and snippet will be * installed, for example, HTML. * </pre> * * <code>.google.ads.googleads.v21.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat page_format = 2;</code> * @param value The enum numeric value on the wire for pageFormat to set. * @return This builder for chaining. */ public Builder setPageFormatValue(int value) { pageFormat_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * <pre> * The format of the web page where the tracking tag and snippet will be * installed, for example, HTML. * </pre> * * <code>.google.ads.googleads.v21.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat page_format = 2;</code> * @return The pageFormat. 
*/ @java.lang.Override public com.google.ads.googleads.v21.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat getPageFormat() { com.google.ads.googleads.v21.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat result = com.google.ads.googleads.v21.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat.forNumber(pageFormat_); return result == null ? com.google.ads.googleads.v21.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat.UNRECOGNIZED : result; } /** * <pre> * The format of the web page where the tracking tag and snippet will be * installed, for example, HTML. * </pre> * * <code>.google.ads.googleads.v21.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat page_format = 2;</code> * @param value The pageFormat to set. * @return This builder for chaining. */ public Builder setPageFormat(com.google.ads.googleads.v21.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; pageFormat_ = value.getNumber(); onChanged(); return this; } /** * <pre> * The format of the web page where the tracking tag and snippet will be * installed, for example, HTML. * </pre> * * <code>.google.ads.googleads.v21.enums.TrackingCodePageFormatEnum.TrackingCodePageFormat page_format = 2;</code> * @return This builder for chaining. */ public Builder clearPageFormat() { bitField0_ = (bitField0_ & ~0x00000002); pageFormat_ = 0; onChanged(); return this; } private java.lang.Object globalSiteTag_ = ""; /** * <pre> * The site tag that adds visitors to your basic remarketing lists and sets * new cookies on your domain. * </pre> * * <code>optional string global_site_tag = 5;</code> * @return Whether the globalSiteTag field is set. */ public boolean hasGlobalSiteTag() { return ((bitField0_ & 0x00000004) != 0); } /** * <pre> * The site tag that adds visitors to your basic remarketing lists and sets * new cookies on your domain. 
* </pre> * * <code>optional string global_site_tag = 5;</code> * @return The globalSiteTag. */ public java.lang.String getGlobalSiteTag() { java.lang.Object ref = globalSiteTag_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); globalSiteTag_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * The site tag that adds visitors to your basic remarketing lists and sets * new cookies on your domain. * </pre> * * <code>optional string global_site_tag = 5;</code> * @return The bytes for globalSiteTag. */ public com.google.protobuf.ByteString getGlobalSiteTagBytes() { java.lang.Object ref = globalSiteTag_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); globalSiteTag_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * The site tag that adds visitors to your basic remarketing lists and sets * new cookies on your domain. * </pre> * * <code>optional string global_site_tag = 5;</code> * @param value The globalSiteTag to set. * @return This builder for chaining. */ public Builder setGlobalSiteTag( java.lang.String value) { if (value == null) { throw new NullPointerException(); } globalSiteTag_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * <pre> * The site tag that adds visitors to your basic remarketing lists and sets * new cookies on your domain. * </pre> * * <code>optional string global_site_tag = 5;</code> * @return This builder for chaining. */ public Builder clearGlobalSiteTag() { globalSiteTag_ = getDefaultInstance().getGlobalSiteTag(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * <pre> * The site tag that adds visitors to your basic remarketing lists and sets * new cookies on your domain. 
* </pre> * * <code>optional string global_site_tag = 5;</code> * @param value The bytes for globalSiteTag to set. * @return This builder for chaining. */ public Builder setGlobalSiteTagBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); globalSiteTag_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private java.lang.Object eventSnippet_ = ""; /** * <pre> * The event snippet that works with the site tag to track actions that * should be counted as conversions. * </pre> * * <code>optional string event_snippet = 6;</code> * @return Whether the eventSnippet field is set. */ public boolean hasEventSnippet() { return ((bitField0_ & 0x00000008) != 0); } /** * <pre> * The event snippet that works with the site tag to track actions that * should be counted as conversions. * </pre> * * <code>optional string event_snippet = 6;</code> * @return The eventSnippet. */ public java.lang.String getEventSnippet() { java.lang.Object ref = eventSnippet_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); eventSnippet_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * The event snippet that works with the site tag to track actions that * should be counted as conversions. * </pre> * * <code>optional string event_snippet = 6;</code> * @return The bytes for eventSnippet. */ public com.google.protobuf.ByteString getEventSnippetBytes() { java.lang.Object ref = eventSnippet_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); eventSnippet_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * The event snippet that works with the site tag to track actions that * should be counted as conversions. 
* </pre> * * <code>optional string event_snippet = 6;</code> * @param value The eventSnippet to set. * @return This builder for chaining. */ public Builder setEventSnippet( java.lang.String value) { if (value == null) { throw new NullPointerException(); } eventSnippet_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * <pre> * The event snippet that works with the site tag to track actions that * should be counted as conversions. * </pre> * * <code>optional string event_snippet = 6;</code> * @return This builder for chaining. */ public Builder clearEventSnippet() { eventSnippet_ = getDefaultInstance().getEventSnippet(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * <pre> * The event snippet that works with the site tag to track actions that * should be counted as conversions. * </pre> * * <code>optional string event_snippet = 6;</code> * @param value The bytes for eventSnippet to set. * @return This builder for chaining. */ public Builder setEventSnippetBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); eventSnippet_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.common.TagSnippet) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v21.common.TagSnippet) private static final com.google.ads.googleads.v21.common.TagSnippet DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v21.common.TagSnippet(); } public static com.google.ads.googleads.v21.common.TagSnippet getDefaultInstance() { return 
DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<TagSnippet> PARSER = new com.google.protobuf.AbstractParser<TagSnippet>() { @java.lang.Override public TagSnippet parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<TagSnippet> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<TagSnippet> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v21.common.TagSnippet getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,065
java-dialogflow-cx/proto-google-cloud-dialogflow-cx-v3/src/main/java/com/google/cloud/dialogflow/cx/v3/ImportTestCasesRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dialogflow/cx/v3/test_case.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.dialogflow.cx.v3; /** * * * <pre> * The request message for * [TestCases.ImportTestCases][google.cloud.dialogflow.cx.v3.TestCases.ImportTestCases]. * </pre> * * Protobuf type {@code google.cloud.dialogflow.cx.v3.ImportTestCasesRequest} */ public final class ImportTestCasesRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3.ImportTestCasesRequest) ImportTestCasesRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ImportTestCasesRequest.newBuilder() to construct. 
private ImportTestCasesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ImportTestCasesRequest() { parent_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ImportTestCasesRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.cx.v3.TestCaseProto .internal_static_google_cloud_dialogflow_cx_v3_ImportTestCasesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.cx.v3.TestCaseProto .internal_static_google_cloud_dialogflow_cx_v3_ImportTestCasesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest.class, com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest.Builder.class); } private int sourceCase_ = 0; @SuppressWarnings("serial") private java.lang.Object source_; public enum SourceCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { GCS_URI(2), CONTENT(3), SOURCE_NOT_SET(0); private final int value; private SourceCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. 
*/ @java.lang.Deprecated public static SourceCase valueOf(int value) { return forNumber(value); } public static SourceCase forNumber(int value) { switch (value) { case 2: return GCS_URI; case 3: return CONTENT; case 0: return SOURCE_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public SourceCase getSourceCase() { return SourceCase.forNumber(sourceCase_); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The agent to import test cases to. * Format: `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The agent to import test cases to. * Format: `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int GCS_URI_FIELD_NUMBER = 2; /** * * * <pre> * The [Google Cloud Storage](https://cloud.google.com/storage/docs/) URI * to import test cases from. 
The format of this URI must be * `gs://&lt;bucket-name&gt;/&lt;object-name&gt;`. * * Dialogflow performs a read operation for the Cloud Storage object * on the caller's behalf, so your request authentication must * have read permissions for the object. For more information, see * [Dialogflow access * control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage). * </pre> * * <code>string gcs_uri = 2;</code> * * @return Whether the gcsUri field is set. */ public boolean hasGcsUri() { return sourceCase_ == 2; } /** * * * <pre> * The [Google Cloud Storage](https://cloud.google.com/storage/docs/) URI * to import test cases from. The format of this URI must be * `gs://&lt;bucket-name&gt;/&lt;object-name&gt;`. * * Dialogflow performs a read operation for the Cloud Storage object * on the caller's behalf, so your request authentication must * have read permissions for the object. For more information, see * [Dialogflow access * control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage). * </pre> * * <code>string gcs_uri = 2;</code> * * @return The gcsUri. */ public java.lang.String getGcsUri() { java.lang.Object ref = ""; if (sourceCase_ == 2) { ref = source_; } if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (sourceCase_ == 2) { source_ = s; } return s; } } /** * * * <pre> * The [Google Cloud Storage](https://cloud.google.com/storage/docs/) URI * to import test cases from. The format of this URI must be * `gs://&lt;bucket-name&gt;/&lt;object-name&gt;`. * * Dialogflow performs a read operation for the Cloud Storage object * on the caller's behalf, so your request authentication must * have read permissions for the object. For more information, see * [Dialogflow access * control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage). 
* </pre> * * <code>string gcs_uri = 2;</code> * * @return The bytes for gcsUri. */ public com.google.protobuf.ByteString getGcsUriBytes() { java.lang.Object ref = ""; if (sourceCase_ == 2) { ref = source_; } if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); if (sourceCase_ == 2) { source_ = b; } return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int CONTENT_FIELD_NUMBER = 3; /** * * * <pre> * Uncompressed raw byte content for test cases. * </pre> * * <code>bytes content = 3;</code> * * @return Whether the content field is set. */ @java.lang.Override public boolean hasContent() { return sourceCase_ == 3; } /** * * * <pre> * Uncompressed raw byte content for test cases. * </pre> * * <code>bytes content = 3;</code> * * @return The content. */ @java.lang.Override public com.google.protobuf.ByteString getContent() { if (sourceCase_ == 3) { return (com.google.protobuf.ByteString) source_; } return com.google.protobuf.ByteString.EMPTY; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (sourceCase_ == 2) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, source_); } if (sourceCase_ == 3) { output.writeBytes(3, (com.google.protobuf.ByteString) source_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if 
(!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (sourceCase_ == 2) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, source_); } if (sourceCase_ == 3) { size += com.google.protobuf.CodedOutputStream.computeBytesSize( 3, (com.google.protobuf.ByteString) source_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest)) { return super.equals(obj); } com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest other = (com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest) obj; if (!getParent().equals(other.getParent())) return false; if (!getSourceCase().equals(other.getSourceCase())) return false; switch (sourceCase_) { case 2: if (!getGcsUri().equals(other.getGcsUri())) return false; break; case 3: if (!getContent().equals(other.getContent())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); switch (sourceCase_) { case 2: hash = (37 * hash) + GCS_URI_FIELD_NUMBER; hash = (53 * hash) + getGcsUri().hashCode(); break; case 3: hash = (37 * hash) + CONTENT_FIELD_NUMBER; hash = (53 * hash) + getContent().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The request message for * [TestCases.ImportTestCases][google.cloud.dialogflow.cx.v3.TestCases.ImportTestCases]. 
* </pre> * * Protobuf type {@code google.cloud.dialogflow.cx.v3.ImportTestCasesRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3.ImportTestCasesRequest) com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.cx.v3.TestCaseProto .internal_static_google_cloud_dialogflow_cx_v3_ImportTestCasesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.cx.v3.TestCaseProto .internal_static_google_cloud_dialogflow_cx_v3_ImportTestCasesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest.class, com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest.Builder.class); } // Construct using com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; sourceCase_ = 0; source_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dialogflow.cx.v3.TestCaseProto .internal_static_google_cloud_dialogflow_cx_v3_ImportTestCasesRequest_descriptor; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest getDefaultInstanceForType() { return com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest build() { com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest result = buildPartial(); if 
(!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest buildPartial() { com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest result = new com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0(com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } } private void buildPartialOneofs( com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest result) { result.sourceCase_ = sourceCase_; result.source_ = this.source_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest) { return mergeFrom((com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest) other); } else { super.mergeFrom(other); return this; } } public 
Builder mergeFrom(com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest other) { if (other == com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } switch (other.getSourceCase()) { case GCS_URI: { sourceCase_ = 2; source_ = other.source_; onChanged(); break; } case CONTENT: { setContent(other.getContent()); break; } case SOURCE_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { java.lang.String s = input.readStringRequireUtf8(); sourceCase_ = 2; source_ = s; break; } // case 18 case 26: { source_ = input.readBytes(); sourceCase_ = 3; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int sourceCase_ = 0; private java.lang.Object source_; public SourceCase getSourceCase() { return SourceCase.forNumber(sourceCase_); } public Builder clearSource() { sourceCase_ = 0; source_ = null; onChanged(); return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The agent to import test cases to. 
* Format: `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The agent to import test cases to. * Format: `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The agent to import test cases to. * Format: `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The agent to import test cases to. * Format: `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... 
} * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The agent to import test cases to. * Format: `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The [Google Cloud Storage](https://cloud.google.com/storage/docs/) URI * to import test cases from. The format of this URI must be * `gs://&lt;bucket-name&gt;/&lt;object-name&gt;`. * * Dialogflow performs a read operation for the Cloud Storage object * on the caller's behalf, so your request authentication must * have read permissions for the object. For more information, see * [Dialogflow access * control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage). * </pre> * * <code>string gcs_uri = 2;</code> * * @return Whether the gcsUri field is set. */ @java.lang.Override public boolean hasGcsUri() { return sourceCase_ == 2; } /** * * * <pre> * The [Google Cloud Storage](https://cloud.google.com/storage/docs/) URI * to import test cases from. The format of this URI must be * `gs://&lt;bucket-name&gt;/&lt;object-name&gt;`. * * Dialogflow performs a read operation for the Cloud Storage object * on the caller's behalf, so your request authentication must * have read permissions for the object. For more information, see * [Dialogflow access * control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage). 
* </pre> * * <code>string gcs_uri = 2;</code> * * @return The gcsUri. */ @java.lang.Override public java.lang.String getGcsUri() { java.lang.Object ref = ""; if (sourceCase_ == 2) { ref = source_; } if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (sourceCase_ == 2) { source_ = s; } return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The [Google Cloud Storage](https://cloud.google.com/storage/docs/) URI * to import test cases from. The format of this URI must be * `gs://&lt;bucket-name&gt;/&lt;object-name&gt;`. * * Dialogflow performs a read operation for the Cloud Storage object * on the caller's behalf, so your request authentication must * have read permissions for the object. For more information, see * [Dialogflow access * control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage). * </pre> * * <code>string gcs_uri = 2;</code> * * @return The bytes for gcsUri. */ @java.lang.Override public com.google.protobuf.ByteString getGcsUriBytes() { java.lang.Object ref = ""; if (sourceCase_ == 2) { ref = source_; } if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); if (sourceCase_ == 2) { source_ = b; } return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The [Google Cloud Storage](https://cloud.google.com/storage/docs/) URI * to import test cases from. The format of this URI must be * `gs://&lt;bucket-name&gt;/&lt;object-name&gt;`. * * Dialogflow performs a read operation for the Cloud Storage object * on the caller's behalf, so your request authentication must * have read permissions for the object. For more information, see * [Dialogflow access * control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage). 
* </pre> * * <code>string gcs_uri = 2;</code> * * @param value The gcsUri to set. * @return This builder for chaining. */ public Builder setGcsUri(java.lang.String value) { if (value == null) { throw new NullPointerException(); } sourceCase_ = 2; source_ = value; onChanged(); return this; } /** * * * <pre> * The [Google Cloud Storage](https://cloud.google.com/storage/docs/) URI * to import test cases from. The format of this URI must be * `gs://&lt;bucket-name&gt;/&lt;object-name&gt;`. * * Dialogflow performs a read operation for the Cloud Storage object * on the caller's behalf, so your request authentication must * have read permissions for the object. For more information, see * [Dialogflow access * control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage). * </pre> * * <code>string gcs_uri = 2;</code> * * @return This builder for chaining. */ public Builder clearGcsUri() { if (sourceCase_ == 2) { sourceCase_ = 0; source_ = null; onChanged(); } return this; } /** * * * <pre> * The [Google Cloud Storage](https://cloud.google.com/storage/docs/) URI * to import test cases from. The format of this URI must be * `gs://&lt;bucket-name&gt;/&lt;object-name&gt;`. * * Dialogflow performs a read operation for the Cloud Storage object * on the caller's behalf, so your request authentication must * have read permissions for the object. For more information, see * [Dialogflow access * control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage). * </pre> * * <code>string gcs_uri = 2;</code> * * @param value The bytes for gcsUri to set. * @return This builder for chaining. */ public Builder setGcsUriBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); sourceCase_ = 2; source_ = value; onChanged(); return this; } /** * * * <pre> * Uncompressed raw byte content for test cases. 
* </pre> * * <code>bytes content = 3;</code> * * @return Whether the content field is set. */ public boolean hasContent() { return sourceCase_ == 3; } /** * * * <pre> * Uncompressed raw byte content for test cases. * </pre> * * <code>bytes content = 3;</code> * * @return The content. */ public com.google.protobuf.ByteString getContent() { if (sourceCase_ == 3) { return (com.google.protobuf.ByteString) source_; } return com.google.protobuf.ByteString.EMPTY; } /** * * * <pre> * Uncompressed raw byte content for test cases. * </pre> * * <code>bytes content = 3;</code> * * @param value The content to set. * @return This builder for chaining. */ public Builder setContent(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } sourceCase_ = 3; source_ = value; onChanged(); return this; } /** * * * <pre> * Uncompressed raw byte content for test cases. * </pre> * * <code>bytes content = 3;</code> * * @return This builder for chaining. */ public Builder clearContent() { if (sourceCase_ == 3) { sourceCase_ = 0; source_ = null; onChanged(); } return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3.ImportTestCasesRequest) } // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3.ImportTestCasesRequest) private static final com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest(); } public static com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final 
com.google.protobuf.Parser<ImportTestCasesRequest> PARSER = new com.google.protobuf.AbstractParser<ImportTestCasesRequest>() { @java.lang.Override public ImportTestCasesRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ImportTestCasesRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ImportTestCasesRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3.ImportTestCasesRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,100
java-filestore/proto-google-cloud-filestore-v1/src/main/java/com/google/cloud/filestore/v1/CreateInstanceRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/filestore/v1/cloud_filestore_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.filestore.v1; /** * * * <pre> * CreateInstanceRequest creates an instance. * </pre> * * Protobuf type {@code google.cloud.filestore.v1.CreateInstanceRequest} */ public final class CreateInstanceRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.filestore.v1.CreateInstanceRequest) CreateInstanceRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateInstanceRequest.newBuilder() to construct. 
private CreateInstanceRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateInstanceRequest() { parent_ = ""; instanceId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CreateInstanceRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.filestore.v1.CloudFilestoreServiceProto .internal_static_google_cloud_filestore_v1_CreateInstanceRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.filestore.v1.CloudFilestoreServiceProto .internal_static_google_cloud_filestore_v1_CreateInstanceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.filestore.v1.CreateInstanceRequest.class, com.google.cloud.filestore.v1.CreateInstanceRequest.Builder.class); } private int bitField0_; public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The instance's project and location, in the format * `projects/{project_id}/locations/{location}`. In Filestore, * locations map to Google Cloud zones, for example **us-west1-b**. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The instance's project and location, in the format * `projects/{project_id}/locations/{location}`. 
In Filestore, * locations map to Google Cloud zones, for example **us-west1-b**. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int INSTANCE_ID_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object instanceId_ = ""; /** * * * <pre> * Required. The name of the instance to create. * The name must be unique for the specified project and location. * </pre> * * <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The instanceId. */ @java.lang.Override public java.lang.String getInstanceId() { java.lang.Object ref = instanceId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); instanceId_ = s; return s; } } /** * * * <pre> * Required. The name of the instance to create. * The name must be unique for the specified project and location. * </pre> * * <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for instanceId. 
*/ @java.lang.Override public com.google.protobuf.ByteString getInstanceIdBytes() { java.lang.Object ref = instanceId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); instanceId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int INSTANCE_FIELD_NUMBER = 3; private com.google.cloud.filestore.v1.Instance instance_; /** * * * <pre> * Required. An [instance resource][google.cloud.filestore.v1.Instance] * </pre> * * <code> * .google.cloud.filestore.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the instance field is set. */ @java.lang.Override public boolean hasInstance() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. An [instance resource][google.cloud.filestore.v1.Instance] * </pre> * * <code> * .google.cloud.filestore.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The instance. */ @java.lang.Override public com.google.cloud.filestore.v1.Instance getInstance() { return instance_ == null ? com.google.cloud.filestore.v1.Instance.getDefaultInstance() : instance_; } /** * * * <pre> * Required. An [instance resource][google.cloud.filestore.v1.Instance] * </pre> * * <code> * .google.cloud.filestore.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.filestore.v1.InstanceOrBuilder getInstanceOrBuilder() { return instance_ == null ? 
com.google.cloud.filestore.v1.Instance.getDefaultInstance() : instance_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, instanceId_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(3, getInstance()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, instanceId_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getInstance()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.filestore.v1.CreateInstanceRequest)) { return super.equals(obj); } com.google.cloud.filestore.v1.CreateInstanceRequest other = (com.google.cloud.filestore.v1.CreateInstanceRequest) obj; if (!getParent().equals(other.getParent())) return false; if (!getInstanceId().equals(other.getInstanceId())) return false; if (hasInstance() != other.hasInstance()) 
return false; if (hasInstance()) { if (!getInstance().equals(other.getInstance())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + INSTANCE_ID_FIELD_NUMBER; hash = (53 * hash) + getInstanceId().hashCode(); if (hasInstance()) { hash = (37 * hash) + INSTANCE_FIELD_NUMBER; hash = (53 * hash) + getInstance().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.filestore.v1.CreateInstanceRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.filestore.v1.CreateInstanceRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.filestore.v1.CreateInstanceRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.filestore.v1.CreateInstanceRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.filestore.v1.CreateInstanceRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.filestore.v1.CreateInstanceRequest parseFrom( byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.filestore.v1.CreateInstanceRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.filestore.v1.CreateInstanceRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.filestore.v1.CreateInstanceRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.filestore.v1.CreateInstanceRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.filestore.v1.CreateInstanceRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.filestore.v1.CreateInstanceRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder 
newBuilder(com.google.cloud.filestore.v1.CreateInstanceRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * CreateInstanceRequest creates an instance. * </pre> * * Protobuf type {@code google.cloud.filestore.v1.CreateInstanceRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.filestore.v1.CreateInstanceRequest) com.google.cloud.filestore.v1.CreateInstanceRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.filestore.v1.CloudFilestoreServiceProto .internal_static_google_cloud_filestore_v1_CreateInstanceRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.filestore.v1.CloudFilestoreServiceProto .internal_static_google_cloud_filestore_v1_CreateInstanceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.filestore.v1.CreateInstanceRequest.class, com.google.cloud.filestore.v1.CreateInstanceRequest.Builder.class); } // Construct using com.google.cloud.filestore.v1.CreateInstanceRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getInstanceFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); 
bitField0_ = 0; parent_ = ""; instanceId_ = ""; instance_ = null; if (instanceBuilder_ != null) { instanceBuilder_.dispose(); instanceBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.filestore.v1.CloudFilestoreServiceProto .internal_static_google_cloud_filestore_v1_CreateInstanceRequest_descriptor; } @java.lang.Override public com.google.cloud.filestore.v1.CreateInstanceRequest getDefaultInstanceForType() { return com.google.cloud.filestore.v1.CreateInstanceRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.filestore.v1.CreateInstanceRequest build() { com.google.cloud.filestore.v1.CreateInstanceRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.filestore.v1.CreateInstanceRequest buildPartial() { com.google.cloud.filestore.v1.CreateInstanceRequest result = new com.google.cloud.filestore.v1.CreateInstanceRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.filestore.v1.CreateInstanceRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.instanceId_ = instanceId_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000004) != 0)) { result.instance_ = instanceBuilder_ == null ? 
instance_ : instanceBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.filestore.v1.CreateInstanceRequest) { return mergeFrom((com.google.cloud.filestore.v1.CreateInstanceRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.filestore.v1.CreateInstanceRequest other) { if (other == com.google.cloud.filestore.v1.CreateInstanceRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getInstanceId().isEmpty()) { instanceId_ = other.instanceId_; bitField0_ |= 0x00000002; onChanged(); } if (other.hasInstance()) { mergeInstance(other.getInstance()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { instanceId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { input.readMessage(getInstanceFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The instance's project and location, in the format * `projects/{project_id}/locations/{location}`. In Filestore, * locations map to Google Cloud zones, for example **us-west1-b**. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The instance's project and location, in the format * `projects/{project_id}/locations/{location}`. In Filestore, * locations map to Google Cloud zones, for example **us-west1-b**. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... 
} * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The instance's project and location, in the format * `projects/{project_id}/locations/{location}`. In Filestore, * locations map to Google Cloud zones, for example **us-west1-b**. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The instance's project and location, in the format * `projects/{project_id}/locations/{location}`. In Filestore, * locations map to Google Cloud zones, for example **us-west1-b**. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The instance's project and location, in the format * `projects/{project_id}/locations/{location}`. In Filestore, * locations map to Google Cloud zones, for example **us-west1-b**. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. 
*/ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object instanceId_ = ""; /** * * * <pre> * Required. The name of the instance to create. * The name must be unique for the specified project and location. * </pre> * * <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The instanceId. */ public java.lang.String getInstanceId() { java.lang.Object ref = instanceId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); instanceId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The name of the instance to create. * The name must be unique for the specified project and location. * </pre> * * <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for instanceId. */ public com.google.protobuf.ByteString getInstanceIdBytes() { java.lang.Object ref = instanceId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); instanceId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The name of the instance to create. * The name must be unique for the specified project and location. * </pre> * * <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The instanceId to set. * @return This builder for chaining. */ public Builder setInstanceId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } instanceId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The name of the instance to create. 
     * The name must be unique for the specified project and location.
     * </pre>
     *
     * <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    // NOTE(review): everything in this region is protoc-generated builder plumbing
    // ("DO NOT EDIT"); change the .proto definition and regenerate rather than
    // hand-editing. Bit 0x00000002 of bitField0_ is the has-bit for field 2
    // (instance_id), 0x00000004 for field 3 (instance).
    public Builder clearInstanceId() {
      instanceId_ = getDefaultInstance().getInstanceId();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The name of the instance to create.
     * The name must be unique for the specified project and location.
     * </pre>
     *
     * <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for instanceId to set.
     * @return This builder for chaining.
     */
    public Builder setInstanceIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      instanceId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    private com.google.cloud.filestore.v1.Instance instance_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.filestore.v1.Instance,
            com.google.cloud.filestore.v1.Instance.Builder,
            com.google.cloud.filestore.v1.InstanceOrBuilder>
        instanceBuilder_;

    /**
     *
     *
     * <pre>
     * Required. An [instance resource][google.cloud.filestore.v1.Instance]
     * </pre>
     *
     * <code>
     * .google.cloud.filestore.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the instance field is set.
     */
    public boolean hasInstance() {
      return ((bitField0_ & 0x00000004) != 0);
    }

    /**
     *
     *
     * <pre>
     * Required. An [instance resource][google.cloud.filestore.v1.Instance]
     * </pre>
     *
     * <code>
     * .google.cloud.filestore.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The instance.
     */
    public com.google.cloud.filestore.v1.Instance getInstance() {
      if (instanceBuilder_ == null) {
        return instance_ == null
            ? com.google.cloud.filestore.v1.Instance.getDefaultInstance()
            : instance_;
      } else {
        return instanceBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * Required. An [instance resource][google.cloud.filestore.v1.Instance]
     * </pre>
     *
     * <code>
     * .google.cloud.filestore.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setInstance(com.google.cloud.filestore.v1.Instance value) {
      if (instanceBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        instance_ = value;
      } else {
        instanceBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. An [instance resource][google.cloud.filestore.v1.Instance]
     * </pre>
     *
     * <code>
     * .google.cloud.filestore.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setInstance(com.google.cloud.filestore.v1.Instance.Builder builderForValue) {
      if (instanceBuilder_ == null) {
        instance_ = builderForValue.build();
      } else {
        instanceBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. An [instance resource][google.cloud.filestore.v1.Instance]
     * </pre>
     *
     * <code>
     * .google.cloud.filestore.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeInstance(com.google.cloud.filestore.v1.Instance value) {
      if (instanceBuilder_ == null) {
        // Merge field-by-field only when a non-default message is already present;
        // otherwise simply adopt the incoming message.
        if (((bitField0_ & 0x00000004) != 0)
            && instance_ != null
            && instance_ != com.google.cloud.filestore.v1.Instance.getDefaultInstance()) {
          getInstanceBuilder().mergeFrom(value);
        } else {
          instance_ = value;
        }
      } else {
        instanceBuilder_.mergeFrom(value);
      }
      if (instance_ != null) {
        bitField0_ |= 0x00000004;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. An [instance resource][google.cloud.filestore.v1.Instance]
     * </pre>
     *
     * <code>
     * .google.cloud.filestore.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearInstance() {
      bitField0_ = (bitField0_ & ~0x00000004);
      instance_ = null;
      if (instanceBuilder_ != null) {
        instanceBuilder_.dispose();
        instanceBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. An [instance resource][google.cloud.filestore.v1.Instance]
     * </pre>
     *
     * <code>
     * .google.cloud.filestore.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.filestore.v1.Instance.Builder getInstanceBuilder() {
      bitField0_ |= 0x00000004;
      onChanged();
      return getInstanceFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * Required. An [instance resource][google.cloud.filestore.v1.Instance]
     * </pre>
     *
     * <code>
     * .google.cloud.filestore.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.filestore.v1.InstanceOrBuilder getInstanceOrBuilder() {
      if (instanceBuilder_ != null) {
        return instanceBuilder_.getMessageOrBuilder();
      } else {
        return instance_ == null
            ? com.google.cloud.filestore.v1.Instance.getDefaultInstance()
            : instance_;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. An [instance resource][google.cloud.filestore.v1.Instance]
     * </pre>
     *
     * <code>
     * .google.cloud.filestore.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    // Lazily creates the nested-message field builder on first use.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.filestore.v1.Instance,
            com.google.cloud.filestore.v1.Instance.Builder,
            com.google.cloud.filestore.v1.InstanceOrBuilder>
        getInstanceFieldBuilder() {
      if (instanceBuilder_ == null) {
        instanceBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.filestore.v1.Instance,
                com.google.cloud.filestore.v1.Instance.Builder,
                com.google.cloud.filestore.v1.InstanceOrBuilder>(
                getInstance(), getParentForChildren(), isClean());
        instance_ = null;
      }
      return instanceBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.filestore.v1.CreateInstanceRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.filestore.v1.CreateInstanceRequest)
  private static final com.google.cloud.filestore.v1.CreateInstanceRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.filestore.v1.CreateInstanceRequest();
  }

  public static com.google.cloud.filestore.v1.CreateInstanceRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<CreateInstanceRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateInstanceRequest>() {
        @java.lang.Override
        public CreateInstanceRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<CreateInstanceRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<CreateInstanceRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.filestore.v1.CreateInstanceRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
apache/ratis
37,383
ratis-test/src/test/java/org/apache/ratis/server/raftlog/segmented/TestSegmentedRaftLog.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ratis.server.raftlog.segmented; import org.apache.ratis.BaseTest; import org.apache.ratis.RaftTestUtil.SimpleOperation; import org.apache.ratis.conf.RaftProperties; import org.apache.ratis.metrics.RatisMetricRegistry; import org.apache.ratis.metrics.impl.DefaultTimekeeperImpl; import org.apache.ratis.protocol.RaftGroupId; import org.apache.ratis.protocol.RaftGroupMemberId; import org.apache.ratis.protocol.RaftPeerId; import org.apache.ratis.protocol.exceptions.TimeoutIOException; import org.apache.ratis.server.RaftServerConfigKeys; import org.apache.ratis.server.impl.RetryCacheTestUtil; import org.apache.ratis.server.RetryCache; import org.apache.ratis.server.metrics.RaftLogMetricsBase; import org.apache.ratis.server.protocol.TermIndex; import org.apache.ratis.proto.RaftProtos.LogEntryProto; import org.apache.ratis.server.raftlog.LogEntryHeader; import org.apache.ratis.server.raftlog.LogProtoUtils; import org.apache.ratis.server.raftlog.RaftLog; import org.apache.ratis.server.storage.RaftStorage; import org.apache.ratis.server.storage.RaftStorageTestUtils; import org.apache.ratis.statemachine.impl.SimpleStateMachine4Testing; import org.apache.ratis.statemachine.StateMachine; 
import org.apache.ratis.statemachine.impl.BaseStateMachine;
import org.apache.ratis.util.CodeInjectionForTesting;
import org.apache.ratis.util.DataBlockingQueue;
import org.apache.ratis.util.LifeCycle;
import org.apache.ratis.util.Slf4jUtils;
import org.apache.ratis.util.FileUtils;
import org.apache.ratis.util.JavaUtils;
import org.apache.ratis.util.SizeInBytes;
import org.apache.ratis.util.TimeDuration;

import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.LongSupplier;
import java.util.function.Supplier;
import java.util.stream.Stream;

import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import org.slf4j.event.Level;

import static java.lang.Boolean.FALSE;
import static java.lang.Boolean.TRUE;
import static org.apache.ratis.server.raftlog.segmented.SegmentedRaftLogWorker.RUN_WORKER;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.params.provider.Arguments.arguments;

/** Tests for {@link SegmentedRaftLog}: loading, appending, rolling, truncation and purging. */
public class TestSegmentedRaftLog extends BaseTest {
  static {
    // Reduce log noise from the segmented-log internals during the tests.
    Slf4jUtils.setLogLevel(SegmentedRaftLogWorker.LOG, Level.INFO);
    Slf4jUtils.setLogLevel(SegmentedRaftLogCache.LOG, Level.INFO);
    Slf4jUtils.setLogLevel(SegmentedRaftLog.LOG, Level.INFO);
  }

  /** Parameter matrix (useAsyncFlush, smSyncFlush) for the parameterized tests. */
  public static Stream<Arguments> data() {
    return Stream.of(
        arguments(FALSE, FALSE),
        arguments(FALSE, TRUE),
        arguments(TRUE, FALSE),
        arguments(TRUE, TRUE));
  }

  /** @return the total file size of the currently open segment of the given log. */
  public static long getOpenSegmentSize(RaftLog raftLog) {
    return ((SegmentedRaftLog)raftLog).getRaftLogCache().getOpenSegment().getTotalFileSize();
  }

  private static final RaftPeerId PEER_ID = RaftPeerId.valueOf("s0");
  private static final RaftGroupId GROUP_ID = RaftGroupId.randomId();
  private static final RaftGroupMemberId MEMBER_ID = RaftGroupMemberId.valueOf(PEER_ID, GROUP_ID);

  /** Describes one log segment [start, end] written with a single term; isOpen marks the in-progress segment. */
  static class SegmentRange {
    final long start;
    final long end;
    final long term;
    final boolean isOpen;

    SegmentRange(long s, long e, long term, boolean isOpen) {
      this.start = s;
      this.end = e;
      this.term = term;
      this.isOpen = isOpen;
    }

    // Resolves the on-disk file this segment maps to inside the given storage.
    File getFile(RaftStorage storage) {
      return LogSegmentStartEnd.valueOf(start, end, isOpen).getFile(storage);
    }
  }

  private File storageDir;
  private RaftProperties properties;
  private RaftStorage storage;
  // Cached log sizing knobs read from properties in setup().
  private long segmentMaxSize;
  private long preallocatedSize;
  private int bufferSize;

  /** Builds a log over the shared test storage/properties. */
  SegmentedRaftLog newSegmentedRaftLog() {
    return newSegmentedRaftLog(storage, properties);
  }

  /** Builds a log whose snapshot index is supplied by the given state-machine stub. */
  SegmentedRaftLog newSegmentedRaftLog(LongSupplier getSnapshotIndexFromStateMachine) {
    return newSegmentedRaftLogWithSnapshotIndex(storage, properties, getSnapshotIndexFromStateMachine);
  }

  static SegmentedRaftLog newSegmentedRaftLog(RaftStorage storage, RaftProperties properties) {
    return SegmentedRaftLog.newBuilder()
        .setMemberId(MEMBER_ID)
        .setStorage(storage)
        .setProperties(properties)
        .build();
  }

  private SegmentedRaftLog newSegmentedRaftLogWithSnapshotIndex(RaftStorage storage, RaftProperties properties,
      LongSupplier getSnapshotIndexFromStateMachine) {
    return SegmentedRaftLog.newBuilder()
        .setMemberId(MEMBER_ID)
        .setStorage(storage)
        .setSnapshotIndexSupplier(getSnapshotIndexFromStateMachine)
        .setProperties(properties)
        .build();
  }

  /** Creates a fresh storage directory and caches the log sizing configuration. */
  @BeforeEach
  public void setup() throws Exception {
    storageDir = getTestDir();
    properties = new RaftProperties();
    RaftServerConfigKeys.setStorageDir(properties, Collections.singletonList(storageDir));
    storage = RaftStorageTestUtils.newRaftStorage(storageDir);
    this.segmentMaxSize = RaftServerConfigKeys.Log.segmentSizeMax(properties).getSize();
    this.preallocatedSize = RaftServerConfigKeys.Log.preallocatedSize(properties).getSize();
    this.bufferSize = RaftServerConfigKeys.Log.writeBufferSize(properties).getSizeInt();
  }

  /** Removes the whole per-test storage tree (parent of storageDir). */
  @AfterEach
  public void tearDown() throws Exception {
    if (storageDir != null) {
      FileUtils.deleteFully(storageDir.getParentFile());
    }
  }

  /**
   * Writes the given segment ranges directly to segment files on disk (bypassing
   * the RaftLog API) and returns all entries written, in order.
   */
  private LogEntryProto[] prepareLog(List<SegmentRange> list) throws IOException {
    List<LogEntryProto> entryList = new ArrayList<>();
    for (SegmentRange range : list) {
      final File file = range.getFile(storage);
      final int size = (int) (range.end - range.start + 1);
      LogEntryProto[] entries = new LogEntryProto[size];
      try (SegmentedRaftLogOutputStream out = new SegmentedRaftLogOutputStream(file, false,
          segmentMaxSize, preallocatedSize, ByteBuffer.allocateDirect(bufferSize))) {
        for (int i = 0; i < size; i++) {
          SimpleOperation m = new SimpleOperation("m" + (i + range.start));
          entries[i] = LogProtoUtils.toLogEntryProto(m.getLogEntryContent(), range.term, i + range.start);
          out.write(entries[i]);
        }
      }
      Collections.addAll(entryList, entries);
    }
    return entryList.toArray(new LogEntryProto[entryList.size()]);
  }

  /**
   * Builds (endTerm - startTerm) consecutive segment descriptors of segmentSize
   * entries each, one term per segment; the last segment is marked open.
   */
  static List<SegmentRange> prepareRanges(int startTerm, int endTerm, int segmentSize,
      long startIndex) {
    List<SegmentRange> list = new ArrayList<>(endTerm - startTerm);
    for (int i = startTerm; i < endTerm; i++) {
      list.add(new SegmentRange(startIndex, startIndex + segmentSize - 1, i,
          i == endTerm - 1));
      startIndex += segmentSize;
    }
    return list;
  }

  private LogEntryProto getLastEntry(SegmentedRaftLog raftLog) throws IOException {
    return raftLog.get(raftLog.getLastEntryTermIndex().getIndex());
  }

  /** Writes segment files on disk, then opens a fresh log and verifies every entry loads back. */
  @ParameterizedTest
  @MethodSource("data")
  public void testLoadLogSegments(Boolean useAsyncFlush, Boolean smSyncFlush) throws Exception {
    RaftServerConfigKeys.Log.setAsyncFlushEnabled(properties, useAsyncFlush);
    RaftServerConfigKeys.Log.StateMachineData.setSync(properties, smSyncFlush);
    // first generate log files
    List<SegmentRange> ranges = prepareRanges(0, 5, 100, 0);
    LogEntryProto[] entries = prepareLog(ranges);

    // create RaftLog object and load log file
    try (SegmentedRaftLog raftLog = newSegmentedRaftLog()) {
      raftLog.open(RaftLog.INVALID_LOG_INDEX, null);
      // check if log entries are loaded correctly
      for (LogEntryProto e : entries) {
        LogEntryProto entry = raftLog.get(e.getIndex());
        Assertions.assertEquals(e, entry);
      }

      final LogEntryHeader[] termIndices = raftLog.getEntries(0, 500);
      LogEntryProto[] entriesFromLog = Arrays.stream(termIndices)
          .map(ti -> {
            try {
              return raftLog.get(ti.getIndex());
            } catch (IOException e) {
              throw new RuntimeException(e);
            }
          })
          .toArray(LogEntryProto[]::new);
      Assertions.assertArrayEquals(entries, entriesFromLog);
      Assertions.assertEquals(entries[entries.length - 1], getLastEntry(raftLog));

      // The load/read timers must have been exercised by the open + reads above.
      final RatisMetricRegistry metricRegistryForLogWorker = RaftLogMetricsBase.createRegistry(MEMBER_ID);

      final DefaultTimekeeperImpl load =
          (DefaultTimekeeperImpl) metricRegistryForLogWorker.timer("segmentLoadLatency");
      assertTrue(load.getTimer().getMeanRate() > 0);

      final DefaultTimekeeperImpl read =
          (DefaultTimekeeperImpl) metricRegistryForLogWorker.timer("readEntryLatency");
      assertTrue(read.getTimer().getMeanRate() > 0);
    }
  }

  /** Builds entries for all the given ranges without state-machine data. */
  static List<LogEntryProto> prepareLogEntries(List<SegmentRange> slist,
      Supplier<String> stringSupplier) {
    List<LogEntryProto> eList = new ArrayList<>();
    for (SegmentRange range : slist) {
      prepareLogEntries(range, stringSupplier, false, eList);
    }
    return eList;
  }

  /** Appends one entry per index of the range to eList and returns the same list. */
  static List<LogEntryProto> prepareLogEntries(SegmentRange range,
      Supplier<String> stringSupplier, boolean hasStataMachineData, List<LogEntryProto> eList) {
    for(long index = range.start; index <= range.end; index++) {
      eList.add(prepareLogEntry(range.term, index, stringSupplier, hasStataMachineData));
    }
    return
eList; } static LogEntryProto prepareLogEntry(long term, long index, Supplier<String> stringSupplier, boolean hasStataMachineData) { final SimpleOperation m = stringSupplier == null? new SimpleOperation("m" + index, hasStataMachineData): new SimpleOperation(stringSupplier.get(), hasStataMachineData); return LogProtoUtils.toLogEntryProto(m.getLogEntryContent(), term, index); } /** * Append entry one by one and check if log state is correct. */ @ParameterizedTest @MethodSource("data") public void testAppendEntry(Boolean useAsyncFlush, Boolean smSyncFlush) throws Exception { RaftServerConfigKeys.Log.setAsyncFlushEnabled(properties, useAsyncFlush); RaftServerConfigKeys.Log.StateMachineData.setSync(properties, smSyncFlush); List<SegmentRange> ranges = prepareRanges(0, 5, 200, 0); List<LogEntryProto> entries = prepareLogEntries(ranges, null); try (SegmentedRaftLog raftLog = newSegmentedRaftLog()) { raftLog.open(RaftLog.INVALID_LOG_INDEX, null); // append entries to the raftlog entries.stream().map(raftLog::appendEntry).forEach(CompletableFuture::join); } try (SegmentedRaftLog raftLog = newSegmentedRaftLog()) { raftLog.open(RaftLog.INVALID_LOG_INDEX, null); // check if the raft log is correct checkEntries(raftLog, entries, 0, entries.size()); } try (SegmentedRaftLog raftLog = newSegmentedRaftLog()) { raftLog.open(RaftLog.INVALID_LOG_INDEX, null); TermIndex lastTermIndex = raftLog.getLastEntryTermIndex(); IllegalStateException ex = null; try { // append entry fails if append entry term is lower than log's last entry term raftLog.appendEntry(LogEntryProto.newBuilder(entries.get(0)) .setTerm(lastTermIndex.getTerm() - 1) .setIndex(lastTermIndex.getIndex() + 1).build()); } catch (IllegalStateException e) { ex = e; } assertTrue(ex.getMessage().contains("term less than RaftLog's last term")); try { // append entry fails if difference between append entry index and log's last entry index is greater than 1 raftLog.appendEntry(LogEntryProto.newBuilder(entries.get(0)) 
.setTerm(lastTermIndex.getTerm()) .setIndex(lastTermIndex.getIndex() + 2).build()); } catch (IllegalStateException e) { ex = e; } assertTrue(ex.getMessage().contains("and RaftLog's last index " + lastTermIndex.getIndex() + " (or snapshot index " + raftLog.getSnapshotIndex() + ") is greater than 1")); raftLog.onSnapshotInstalled(raftLog.getLastEntryTermIndex().getIndex()); try { // append entry fails if there are no log entries && log's snapshotIndex + 1 < incoming log entry. raftLog.appendEntry(LogEntryProto.newBuilder(entries.get(0)) .setTerm(lastTermIndex.getTerm()) .setIndex(lastTermIndex.getIndex() + 2).build()); } catch (IllegalStateException e) { ex = e; } assertTrue(ex.getMessage().contains("Difference between entry index and RaftLog's latest snapshot " + "index 999 is greater than 1")); } } @ParameterizedTest @MethodSource("data") public void testAppendEntryAfterPurge(Boolean useAsyncFlush, Boolean smSyncFlush) throws Exception { RaftServerConfigKeys.Log.setAsyncFlushEnabled(properties, useAsyncFlush); RaftServerConfigKeys.Log.StateMachineData.setSync(properties, smSyncFlush); List<SegmentRange> ranges = prepareRanges(0, 5, 200, 0); List<LogEntryProto> entries = prepareLogEntries(ranges, null); long desiredSnapshotIndex = entries.size() - 2; final LongSupplier getSnapshotIndexFromStateMachine = new LongSupplier() { private boolean firstCall = true; @Override public long getAsLong() { long index = firstCall ? -1 : desiredSnapshotIndex; firstCall = !firstCall; return index; } }; try (SegmentedRaftLog raftLog = newSegmentedRaftLog(getSnapshotIndexFromStateMachine)) { raftLog.open(RaftLog.INVALID_LOG_INDEX, null); entries.subList(0, entries.size() - 1).stream().map(raftLog::appendEntry).forEach(CompletableFuture::join); raftLog.onSnapshotInstalled(desiredSnapshotIndex); // Try appending last entry after snapshot + purge. 
      CompletableFuture<Long> appendEntryFuture = raftLog.appendEntry(entries.get(entries.size() - 1));
      assertTrue(desiredSnapshotIndex + 1 == appendEntryFuture.get());
    }
  }

  /**
   * Keep appending entries, make sure the rolling is correct.
   */
  @ParameterizedTest
  @MethodSource("data")
  public void testAppendAndRoll(Boolean useAsyncFlush, Boolean smSyncFlush) throws Exception {
    RaftServerConfigKeys.Log.setAsyncFlushEnabled(properties, useAsyncFlush);
    RaftServerConfigKeys.Log.StateMachineData.setSync(properties, smSyncFlush);
    // Small segment limit so that 1024 x 1KB entries force multiple roll-overs.
    RaftServerConfigKeys.Log.setPreallocatedSize(properties, SizeInBytes.valueOf("16KB"));
    RaftServerConfigKeys.Log.setSegmentSizeMax(properties, SizeInBytes.valueOf("128KB"));

    List<SegmentRange> ranges = prepareRanges(0, 1, 1024, 0);
    final byte[] content = new byte[1024];
    List<LogEntryProto> entries = prepareLogEntries(ranges, () -> new String(content));

    try (SegmentedRaftLog raftLog = newSegmentedRaftLog()) {
      raftLog.open(RaftLog.INVALID_LOG_INDEX, null);
      // append entries to the raftlog
      entries.stream().map(raftLog::appendEntry).forEach(CompletableFuture::join);
    }

    try (SegmentedRaftLog raftLog = newSegmentedRaftLog()) {
      raftLog.open(RaftLog.INVALID_LOG_INDEX, null);
      // check if the raft log is correct
      checkEntries(raftLog, entries, 0, entries.size());
      Assertions.assertEquals(9, raftLog.getRaftLogCache().getNumOfSegments());
    }
  }

  /**
   * Injects code into the log worker so that WriteLog and PurgeLog tasks are queued
   * together, then verifies the purge executes and the appends still complete.
   */
  @ParameterizedTest
  @MethodSource("data")
  public void testPurgeAfterAppendEntry(Boolean useAsyncFlush, Boolean smSyncFlush) throws Exception {
    RaftServerConfigKeys.Log.setAsyncFlushEnabled(properties, useAsyncFlush);
    RaftServerConfigKeys.Log.StateMachineData.setSync(properties, smSyncFlush);
    RaftServerConfigKeys.Log.setPurgeGap(properties, 1);
    RaftServerConfigKeys.Log.setForceSyncNum(properties, 128);
    int startTerm = 0;
    int endTerm = 2;
    int segmentSize = 10;
    long endIndexOfClosedSegment = segmentSize * (endTerm - startTerm - 1);
    long nextStartIndex = segmentSize * (endTerm - startTerm);
    // append entries and roll logSegment for later purge operation
    List<SegmentRange> ranges0 = prepareRanges(startTerm, endTerm, segmentSize, 0);
    List<LogEntryProto> entries0 = prepareLogEntries(ranges0, null);
    try (SegmentedRaftLog raftLog = newSegmentedRaftLog()) {
      raftLog.open(RaftLog.INVALID_LOG_INDEX, null);
      entries0.stream().map(raftLog::appendEntry).forEach(CompletableFuture::join);
    }
    // test the pattern in the task queue of SegmentedRaftLogWorker: (WriteLog, ..., PurgeLog)
    List<SegmentRange> ranges = prepareRanges(endTerm - 1, endTerm, 1, nextStartIndex);
    List<LogEntryProto> entries = prepareLogEntries(ranges, null);
    try (SegmentedRaftLog raftLog = newSegmentedRaftLog()) {
      final CountDownLatch raftLogOpened = new CountDownLatch(1);
      final CountDownLatch tasksAdded = new CountDownLatch(1);
      // inject test code to make the pattern (WriteLog, PurgeLog)
      final ConcurrentLinkedQueue<CompletableFuture<Long>> appendFutures = new ConcurrentLinkedQueue<>();
      final AtomicReference<CompletableFuture<Long>> purgeFuture = new AtomicReference<>();
      final AtomicInteger tasksCount = new AtomicInteger(0);
      CodeInjectionForTesting.put(RUN_WORKER, (localId, remoteId, args) -> {
        // wait for raftLog to be opened
        try {
          if(!raftLogOpened.await(FIVE_SECONDS.getDuration(), FIVE_SECONDS.getUnit())) {
            throw new TimeoutException();
          }
        } catch (InterruptedException | TimeoutException e) {
          LOG.error("an exception occurred", e);
          throw new RuntimeException(e);
        }
        // add WriteLog and PurgeLog tasks
        entries.stream().map(raftLog::appendEntry).forEach(appendFutures::add);
        purgeFuture.set(raftLog.purge(endIndexOfClosedSegment));
        tasksCount.set(((DataBlockingQueue<?>) args[0]).getNumElements());
        tasksAdded.countDown();
        return true;
      });
      // open raftLog
      raftLog.open(RaftLog.INVALID_LOG_INDEX, null);
      raftLogOpened.countDown();
      // wait for all tasks to be added
      if(!tasksAdded.await(FIVE_SECONDS.getDuration(), FIVE_SECONDS.getUnit())) {
        throw new TimeoutException();
      }
      // one task per appended entry plus the single purge task
      Assertions.assertEquals(entries.size() + 1, tasksCount.get());
      // check if the purge task is executed
      final Long purged = purgeFuture.get().get();
      LOG.info("purgeIndex = {}, purged = {}", endIndexOfClosedSegment, purged);
      Assertions.assertEquals(endIndexOfClosedSegment, raftLog.getRaftLogCache().getStartIndex());
      // check if the appendEntry futures are done
      JavaUtils.allOf(appendFutures).get(FIVE_SECONDS.getDuration(), FIVE_SECONDS.getUnit());
    } finally {
      // always restore the default (no-op) injection so later tests are unaffected
      CodeInjectionForTesting.put(RUN_WORKER, (localId, remoteId, args) -> false);
    }
  }

  /** Truncates at several indices (900 down to 0, step 150) and verifies each time. */
  @ParameterizedTest
  @MethodSource("data")
  public void testTruncate(Boolean useAsyncFlush, Boolean smSyncFlush) throws Exception {
    RaftServerConfigKeys.Log.setAsyncFlushEnabled(properties, useAsyncFlush);
    RaftServerConfigKeys.Log.StateMachineData.setSync(properties, smSyncFlush);
    // prepare the log for truncation
    List<SegmentRange> ranges = prepareRanges(0, 5, 200, 0);
    List<LogEntryProto> entries = prepareLogEntries(ranges, null);

    try (SegmentedRaftLog raftLog = newSegmentedRaftLog()) {
      raftLog.open(RaftLog.INVALID_LOG_INDEX, null);
      // append entries to the raftlog
      entries.stream().map(raftLog::appendEntry).forEach(CompletableFuture::join);
    }

    for (long fromIndex = 900; fromIndex >= 0; fromIndex -= 150) {
      testTruncate(entries, fromIndex);
    }
  }

  /** Truncates from the given index, then reloads the log and re-verifies. */
  private void testTruncate(List<LogEntryProto> entries, long fromIndex) throws Exception {
    try (SegmentedRaftLog raftLog = newSegmentedRaftLog()) {
      raftLog.open(RaftLog.INVALID_LOG_INDEX, null);
      // truncate the log
      raftLog.truncate(fromIndex).join();
      checkEntries(raftLog, entries, 0, (int) fromIndex);
    }

    try (SegmentedRaftLog raftLog = newSegmentedRaftLog()) {
      raftLog.open(RaftLog.INVALID_LOG_INDEX, null);
      // check if the raft log is correct
      if (fromIndex > 0) {
        Assertions.assertEquals(entries.get((int) (fromIndex - 1)), getLastEntry(raftLog));
      } else {
        Assertions.assertNull(raftLog.getLastEntryTermIndex());
      }
      checkEntries(raftLog, entries, 0, (int) fromIndex);
    }
  }

  /** Verifies raftLog contains exactly expected[offset, offset + size) both per-entry and in bulk. */
  private void checkEntries(RaftLog raftLog, List<LogEntryProto> expected, int offset,
      int size) throws IOException {
    if (size >
0) { for (int i = offset; i < size + offset; i++) { LogEntryProto entry = raftLog.get(expected.get(i).getIndex()); Assertions.assertEquals(expected.get(i), entry); } final LogEntryHeader[] termIndices = raftLog.getEntries( expected.get(offset).getIndex(), expected.get(offset + size - 1).getIndex() + 1); LogEntryProto[] entriesFromLog = Arrays.stream(termIndices) .map(ti -> { try { return raftLog.get(ti.getIndex()); } catch (IOException e) { throw new RuntimeException(e); } }) .toArray(LogEntryProto[]::new); LogEntryProto[] expectedArray = expected.subList(offset, offset + size) .stream().toArray(LogEntryProto[]::new); Assertions.assertArrayEquals(expectedArray, entriesFromLog); } } private void checkFailedEntries(List<LogEntryProto> entries, long fromIndex, RetryCache retryCache) { for (int i = 0; i < entries.size(); i++) { if (i < fromIndex) { RetryCacheTestUtil.assertFailure(retryCache, entries.get(i), false); } else { RetryCacheTestUtil.assertFailure(retryCache, entries.get(i), true); } } } @Test public void testPurgeOnOpenSegment() throws Exception { int startTerm = 0; int endTerm = 5; int segmentSize = 200; long beginIndexOfOpenSegment = segmentSize * (endTerm - startTerm - 1); long expectedIndex = segmentSize * (endTerm - startTerm - 1); long purgePreservation = 0L; purgeAndVerify(startTerm, endTerm, segmentSize, 1, beginIndexOfOpenSegment, expectedIndex); } @Test public void testPurgeOnClosedSegments() throws Exception { int startTerm = 0; int endTerm = 5; int segmentSize = 200; long endIndexOfClosedSegment = segmentSize * (endTerm - startTerm - 1) - 1; long expectedIndex = segmentSize * (endTerm - startTerm - 1); purgeAndVerify(startTerm, endTerm, segmentSize, 1, endIndexOfClosedSegment, expectedIndex); } @Test public void testPurgeLogMetric() throws Exception { int startTerm = 0; int endTerm = 5; int segmentSize = 200; long endIndexOfClosedSegment = segmentSize * (endTerm - startTerm - 1) - 1; long expectedIndex = segmentSize * (endTerm - startTerm - 1); 
final RatisMetricRegistry metricRegistryForLogWorker = RaftLogMetricsBase.createRegistry(MEMBER_ID); purgeAndVerify(startTerm, endTerm, segmentSize, 1, endIndexOfClosedSegment, expectedIndex); final DefaultTimekeeperImpl purge = (DefaultTimekeeperImpl) metricRegistryForLogWorker.timer("purgeLog"); assertTrue(purge.getTimer().getCount() > 0); } @Test public void testPurgeOnClosedSegmentsWithPurgeGap() throws Exception { int startTerm = 0; int endTerm = 5; int segmentSize = 200; long endIndexOfClosedSegment = segmentSize * (endTerm - startTerm - 1) - 1; long expectedIndex = RaftLog.LEAST_VALID_LOG_INDEX; purgeAndVerify(startTerm, endTerm, segmentSize, 1000, endIndexOfClosedSegment, expectedIndex); } @Test public void testPurgeWithLargePurgePreservationAndSmallPurgeGap() throws Exception { int startTerm = 0; int endTerm = 5; int segmentSize = 200; long endIndex = segmentSize * (endTerm - startTerm) - 1; // start index is set so that the suggested index will not be negative, which will not trigger any purge long startIndex = 200; // purge preservation is larger than the total size of the log entries // which causes suggested index to be lower than the start index long purgePreservation = (segmentSize * (endTerm - startTerm )) + 100; // if the suggested index is lower than the start index due to the purge preservation, we should not purge anything purgeAndVerify(startTerm, endTerm, segmentSize, 1, endIndex, startIndex, startIndex, purgePreservation); } private void purgeAndVerify(int startTerm, int endTerm, int segmentSize, int purgeGap, long purgeIndex, long expectedIndex) throws Exception { purgeAndVerify(startTerm, endTerm, segmentSize, purgeGap, purgeIndex, expectedIndex, 0, 0); } private void purgeAndVerify(int startTerm, int endTerm, int segmentSize, int purgeGap, long purgeIndex, long expectedIndex, long startIndex, long purgePreservation) throws Exception { List<SegmentRange> ranges = prepareRanges(startTerm, endTerm, segmentSize, startIndex); 
    List<LogEntryProto> entries = prepareLogEntries(ranges, null);

    final RaftProperties p = new RaftProperties();
    RaftServerConfigKeys.Log.setPurgeGap(p, purgeGap);
    RaftServerConfigKeys.Log.setPurgePreservationLogNum(p, purgePreservation);
    try (SegmentedRaftLog raftLog = newSegmentedRaftLogWithSnapshotIndex(storage, p, () -> startIndex - 1)) {
      raftLog.open(startIndex - 1, null);
      entries.stream().map(raftLog::appendEntry).forEach(CompletableFuture::join);
      final CompletableFuture<Long> f = raftLog.purge(purgeIndex);
      final Long purged = f.get();
      LOG.info("purgeIndex = {}, purged = {}", purgeIndex, purged);
      Assertions.assertEquals(expectedIndex, raftLog.getRaftLogCache().getStartIndex());
    }
  }

  /**
   * Test append with inconsistent entries
   */
  @ParameterizedTest
  @MethodSource("data")
  public void testAppendEntriesWithInconsistency(Boolean useAsyncFlush, Boolean smSyncFlush) throws Exception {
    RaftServerConfigKeys.Log.setAsyncFlushEnabled(properties, useAsyncFlush);
    RaftServerConfigKeys.Log.StateMachineData.setSync(properties, smSyncFlush);
    // prepare the log for truncation
    List<SegmentRange> ranges = prepareRanges(0, 5, 200, 0);
    List<LogEntryProto> entries = prepareLogEntries(ranges, null);
    final RetryCache retryCache = RetryCacheTestUtil.createRetryCache();
    try (SegmentedRaftLog raftLog =
        RetryCacheTestUtil.newSegmentedRaftLog(MEMBER_ID, retryCache, storage, properties)) {
      raftLog.open(RaftLog.INVALID_LOG_INDEX, null);
      entries.forEach(entry -> RetryCacheTestUtil.createEntry(retryCache, entry));
      // append entries to the raftlog
      entries.stream().map(raftLog::appendEntry).forEach(CompletableFuture::join);
    }

    // append entries whose first 100 entries are the same with existing log,
    // and the next 100 are with different term
    SegmentRange r1 = new SegmentRange(550, 599, 2, false);
    SegmentRange r2 = new SegmentRange(600, 649, 3, false);
    SegmentRange r3 = new SegmentRange(650, 749, 10, false);
    List<LogEntryProto> newEntries = prepareLogEntries(
        Arrays.asList(r1, r2, r3), null);
    try (SegmentedRaftLog raftLog =
        RetryCacheTestUtil.newSegmentedRaftLog(MEMBER_ID, retryCache, storage, properties)) {
      raftLog.open(RaftLog.INVALID_LOG_INDEX, null);
      LOG.info("newEntries[0] = {}", newEntries.get(0));
      final int last = newEntries.size() - 1;
      LOG.info("newEntries[{}] = {}", last, newEntries.get(last));
      raftLog.append(newEntries).forEach(CompletableFuture::join);

      // entries from index 650 on conflict (different term) and must be marked failed
      checkFailedEntries(entries, 650, retryCache);
      checkEntries(raftLog, entries, 0, 650);
      checkEntries(raftLog, newEntries, 100, 100);
      Assertions.assertEquals(newEntries.get(newEntries.size() - 1),
          getLastEntry(raftLog));
      Assertions.assertEquals(newEntries.get(newEntries.size() - 1).getIndex(),
          raftLog.getFlushIndex());
    }

    // load the raftlog again and check
    try (SegmentedRaftLog raftLog =
        RetryCacheTestUtil.newSegmentedRaftLog(MEMBER_ID, retryCache, storage, properties)) {
      raftLog.open(RaftLog.INVALID_LOG_INDEX, null);
      checkEntries(raftLog, entries, 0, 650);
      checkEntries(raftLog, newEntries, 100, 100);
      Assertions.assertEquals(newEntries.get(newEntries.size() - 1),
          getLastEntry(raftLog));
      Assertions.assertEquals(newEntries.get(newEntries.size() - 1).getIndex(),
          raftLog.getFlushIndex());

      SegmentedRaftLogCache cache = raftLog.getRaftLogCache();
      Assertions.assertEquals(5, cache.getNumOfSegments());
    }
  }

  /**
   * Exercises the interaction between appendEntry and the state machine's
   * write/flush of state-machine data, including blocked write/flush paths.
   * NOTE(review): the block/unblock ordering below is deliberate and
   * order-sensitive — do not reorder.
   */
  @ParameterizedTest
  @MethodSource("data")
  public void testSegmentedRaftLogStateMachineData(Boolean useAsyncFlush, Boolean smSyncFlush) throws Exception {
    RaftServerConfigKeys.Log.setAsyncFlushEnabled(properties, useAsyncFlush);
    RaftServerConfigKeys.Log.StateMachineData.setSync(properties, smSyncFlush);
    final SegmentRange range = new SegmentRange(0, 10, 1, true);
    final List<LogEntryProto> entries = prepareLogEntries(range, null, true, new ArrayList<>());

    final SimpleStateMachine4Testing sm = new SimpleStateMachine4Testing();
    try (SegmentedRaftLog raftLog = SegmentedRaftLog.newBuilder()
        .setMemberId(MEMBER_ID)
        .setStateMachine(sm)
        .setStorage(storage)
        .setProperties(properties)
        .build()) {
      raftLog.open(RaftLog.INVALID_LOG_INDEX, null);

      int next = 0;
      long flush = -1;
      assertIndices(raftLog, flush, next);
      raftLog.appendEntry(entries.get(next++));
      assertIndices(raftLog, flush, next);
      raftLog.appendEntry(entries.get(next++));
      assertIndices(raftLog, flush, next);
      raftLog.appendEntry(entries.get(next++));
      assertIndicesMultipleAttempts(raftLog, flush += 3, next);

      sm.blockFlushStateMachineData();
      raftLog.appendEntry(entries.get(next++));

      sm.blockWriteStateMachineData();
      final Thread t = startAppendEntryThread(raftLog, entries.get(next++));
      TimeUnit.SECONDS.sleep(1);
      assertTrue(t.isAlive());
      sm.unblockWriteStateMachineData();

      assertIndices(raftLog, flush, next);
      TimeUnit.SECONDS.sleep(1);
      assertIndices(raftLog, flush, next);
      sm.unblockFlushStateMachineData();
      assertIndicesMultipleAttempts(raftLog, flush + 2, next);

      // raftLog.appendEntry(entry).get() won't return
      // until sm.unblockFlushStateMachineData() was called.
      t.join();
    }
  }

  /**
   * When state-machine data sync times out past the retry budget, the worker must
   * notify the state machine of the log failure and fail the append future.
   */
  @ParameterizedTest
  @MethodSource("data")
  public void testServerShutdownOnTimeoutIOException(Boolean useAsyncFlush, Boolean smSyncFlush) throws Throwable {
    RaftServerConfigKeys.Log.setAsyncFlushEnabled(properties, useAsyncFlush);
    RaftServerConfigKeys.Log.StateMachineData.setSync(properties, smSyncFlush);
    RaftServerConfigKeys.Log.StateMachineData.setSync(properties, true);
    final TimeDuration syncTimeout = TimeDuration.valueOf(100, TimeUnit.MILLISECONDS);
    RaftServerConfigKeys.Log.StateMachineData.setSyncTimeout(properties, syncTimeout);
    final int numRetries = 2;
    RaftServerConfigKeys.Log.StateMachineData.setSyncTimeoutRetry(properties, numRetries);

    final LogEntryProto entry = prepareLogEntry(0, 0, null, true);
    final StateMachine sm = new BaseStateMachine() {
      @Override
      public CompletableFuture<Void> write(LogEntryProto entry) {
        getLifeCycle().transition(LifeCycle.State.STARTING);
        getLifeCycle().transition(LifeCycle.State.RUNNING);
        return new CompletableFuture<>(); // the future never completes
      }

      @Override
      public void
notifyLogFailed(Throwable cause, LogEntryProto entry) { LOG.info("Test StateMachine: Ratis log failed notification received as expected.", cause); LOG.info("Test StateMachine: Transition to PAUSED state."); Assertions.assertNotNull(entry); getLifeCycle().transition(LifeCycle.State.PAUSING); getLifeCycle().transition(LifeCycle.State.PAUSED); } }; ExecutionException ex; try (SegmentedRaftLog raftLog = SegmentedRaftLog.newBuilder() .setMemberId(MEMBER_ID) .setStateMachine(sm) .setStorage(storage) .setProperties(properties) .build()) { raftLog.open(RaftLog.INVALID_LOG_INDEX, null); // SegmentedRaftLogWorker should catch TimeoutIOException CompletableFuture<Long> f = raftLog.appendEntry(entry); // Wait for async writeStateMachineData to finish ex = Assertions.assertThrows(ExecutionException.class, f::get); } Assertions.assertSame(LifeCycle.State.PAUSED, sm.getLifeCycleState()); Assertions.assertInstanceOf(TimeoutIOException.class, ex.getCause()); } static Thread startAppendEntryThread(RaftLog raftLog, LogEntryProto entry) { final Thread t = new Thread(() -> { try { raftLog.appendEntry(entry).get(); } catch (Throwable e) { // just ignore } }); t.start(); return t; } void assertIndices(RaftLog raftLog, long expectedFlushIndex, long expectedNextIndex) { LOG.info("assert expectedFlushIndex={}", expectedFlushIndex); Assertions.assertEquals(expectedFlushIndex, raftLog.getFlushIndex()); LOG.info("assert expectedNextIndex={}", expectedNextIndex); Assertions.assertEquals(expectedNextIndex, raftLog.getNextIndex()); } void assertIndicesMultipleAttempts(RaftLog raftLog, long expectedFlushIndex, long expectedNextIndex) throws Exception { JavaUtils.attempt(() -> assertIndices(raftLog, expectedFlushIndex, expectedNextIndex), 10, HUNDRED_MILLIS, "assertIndices", LOG); } @ParameterizedTest @MethodSource("data") public void testAsyncFlushPerf1(Boolean useAsyncFlush, Boolean smSyncFlush) throws Exception { RaftServerConfigKeys.Log.setAsyncFlushEnabled(properties, useAsyncFlush); 
RaftServerConfigKeys.Log.StateMachineData.setSync(properties, smSyncFlush); List<SegmentRange> ranges = prepareRanges(0, 50, 20000, 0); List<LogEntryProto> entries = prepareLogEntries(ranges, null); try (SegmentedRaftLog raftLog = newSegmentedRaftLog()) { raftLog.open(RaftLog.INVALID_LOG_INDEX, null); // append entries to the raftlog List<List<CompletableFuture<Long>>> futures = new ArrayList<>(); long start = System.nanoTime(); for (int i = 0; i < entries.size(); i += 5) { // call append API futures.add(raftLog.append(Arrays.asList( entries.get(i), entries.get(i + 1), entries.get(i + 2), entries.get(i + 3), entries.get(i + 4)))); } for (List<CompletableFuture<Long>> futureList: futures) { futureList.forEach(CompletableFuture::join); } System.out.println(entries.size() + " appendEntry finished in " + (System.nanoTime() - start) + " ns with asyncFlush " + useAsyncFlush); } } @ParameterizedTest @MethodSource("data") public void testAsyncFlushPerf2(Boolean useAsyncFlush, Boolean smSyncFlush) throws Exception { RaftServerConfigKeys.Log.setAsyncFlushEnabled(properties, useAsyncFlush); RaftServerConfigKeys.Log.StateMachineData.setSync(properties, smSyncFlush); List<SegmentRange> ranges = prepareRanges(0, 50, 20000, 0); List<LogEntryProto> entries = prepareLogEntries(ranges, null); try (SegmentedRaftLog raftLog = newSegmentedRaftLog()) { raftLog.open(RaftLog.INVALID_LOG_INDEX, null); // append entries to the raftlog List<CompletableFuture<Long>> futures = new ArrayList<>(); long start = System.nanoTime(); for (int i = 0; i < entries.size(); i++) { // call appendEntry API futures.add(raftLog.appendEntry(entries.get(i))); } for (CompletableFuture<Long> futureList: futures) { futureList.join(); } System.out.println(entries.size() + " appendEntry finished in " + (System.nanoTime() - start) + " ns with asyncFlush " + useAsyncFlush); } } }
googleapis/google-cloud-java
37,086
java-alloydb/proto-google-cloud-alloydb-v1beta/src/main/java/com/google/cloud/alloydb/v1beta/ListSupportedDatabaseFlagsRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/alloydb/v1beta/service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.alloydb.v1beta; /** * * * <pre> * Message for listing the information about the supported Database flags. * </pre> * * Protobuf type {@code google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest} */ public final class ListSupportedDatabaseFlagsRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest) ListSupportedDatabaseFlagsRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListSupportedDatabaseFlagsRequest.newBuilder() to construct. 
private ListSupportedDatabaseFlagsRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListSupportedDatabaseFlagsRequest() { parent_ = ""; pageToken_ = ""; scope_ = 0; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListSupportedDatabaseFlagsRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.alloydb.v1beta.ServiceProto .internal_static_google_cloud_alloydb_v1beta_ListSupportedDatabaseFlagsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.alloydb.v1beta.ServiceProto .internal_static_google_cloud_alloydb_v1beta_ListSupportedDatabaseFlagsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest.class, com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The name of the parent resource. The required format is: * * projects/{project}/locations/{location} * * Regardless of the parent specified here, as long it is contains a valid * project and location, the service will return a static list of supported * flags resources. Note that we do not yet support region-specific * flags. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. 
*/ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The name of the parent resource. The required format is: * * projects/{project}/locations/{location} * * Regardless of the parent specified here, as long it is contains a valid * project and location, the service will return a static list of supported * flags resources. Note that we do not yet support region-specific * flags. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 2; private int pageSize_ = 0; /** * * * <pre> * Requested page size. Server may return fewer items than requested. * If unspecified, server will pick an appropriate default. * </pre> * * <code>int32 page_size = 2;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. 
*/ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. */ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int SCOPE_FIELD_NUMBER = 6; private int scope_ = 0; /** * * * <pre> * Optional. The scope for which supported flags are requested. If not * specified, default is DATABASE. * </pre> * * <code> * .google.cloud.alloydb.v1beta.SupportedDatabaseFlag.Scope scope = 6 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The enum numeric value on the wire for scope. */ @java.lang.Override public int getScopeValue() { return scope_; } /** * * * <pre> * Optional. The scope for which supported flags are requested. If not * specified, default is DATABASE. * </pre> * * <code> * .google.cloud.alloydb.v1beta.SupportedDatabaseFlag.Scope scope = 6 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The scope. */ @java.lang.Override public com.google.cloud.alloydb.v1beta.SupportedDatabaseFlag.Scope getScope() { com.google.cloud.alloydb.v1beta.SupportedDatabaseFlag.Scope result = com.google.cloud.alloydb.v1beta.SupportedDatabaseFlag.Scope.forNumber(scope_); return result == null ? 
com.google.cloud.alloydb.v1beta.SupportedDatabaseFlag.Scope.UNRECOGNIZED : result; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (pageSize_ != 0) { output.writeInt32(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); } if (scope_ != com.google.cloud.alloydb.v1beta.SupportedDatabaseFlag.Scope.SCOPE_UNSPECIFIED .getNumber()) { output.writeEnum(6, scope_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); } if (scope_ != com.google.cloud.alloydb.v1beta.SupportedDatabaseFlag.Scope.SCOPE_UNSPECIFIED .getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(6, scope_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest)) { return 
super.equals(obj); } com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest other = (com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest) obj; if (!getParent().equals(other.getParent())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (scope_ != other.scope_) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (37 * hash) + SCOPE_FIELD_NUMBER; hash = (53 * hash) + scope_; hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static 
com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Message for listing the information about the supported Database flags. 
* </pre> * * Protobuf type {@code google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest) com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.alloydb.v1beta.ServiceProto .internal_static_google_cloud_alloydb_v1beta_ListSupportedDatabaseFlagsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.alloydb.v1beta.ServiceProto .internal_static_google_cloud_alloydb_v1beta_ListSupportedDatabaseFlagsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest.class, com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest.Builder.class); } // Construct using // com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; pageSize_ = 0; pageToken_ = ""; scope_ = 0; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.alloydb.v1beta.ServiceProto .internal_static_google_cloud_alloydb_v1beta_ListSupportedDatabaseFlagsRequest_descriptor; } @java.lang.Override public com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest getDefaultInstanceForType() { return com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest.getDefaultInstance(); } @java.lang.Override public 
com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest build() { com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest buildPartial() { com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest result = new com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.pageSize_ = pageSize_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageToken_ = pageToken_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.scope_ = scope_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder 
mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest) { return mergeFrom((com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest other) { if (other == com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000004; onChanged(); } if (other.scope_ != 0) { setScopeValue(other.getScopeValue()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { pageSize_ = input.readInt32(); bitField0_ |= 0x00000002; break; } // case 16 case 26: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 case 48: { scope_ = input.readEnum(); bitField0_ |= 0x00000008; break; } // case 48 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw 
e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The name of the parent resource. The required format is: * * projects/{project}/locations/{location} * * Regardless of the parent specified here, as long it is contains a valid * project and location, the service will return a static list of supported * flags resources. Note that we do not yet support region-specific * flags. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The name of the parent resource. The required format is: * * projects/{project}/locations/{location} * * Regardless of the parent specified here, as long it is contains a valid * project and location, the service will return a static list of supported * flags resources. Note that we do not yet support region-specific * flags. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The name of the parent resource. 
The required format is: * * projects/{project}/locations/{location} * * Regardless of the parent specified here, as long it is contains a valid * project and location, the service will return a static list of supported * flags resources. Note that we do not yet support region-specific * flags. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The name of the parent resource. The required format is: * * projects/{project}/locations/{location} * * Regardless of the parent specified here, as long it is contains a valid * project and location, the service will return a static list of supported * flags resources. Note that we do not yet support region-specific * flags. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The name of the parent resource. The required format is: * * projects/{project}/locations/{location} * * Regardless of the parent specified here, as long it is contains a valid * project and location, the service will return a static list of supported * flags resources. Note that we do not yet support region-specific * flags. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. 
*/ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private int pageSize_; /** * * * <pre> * Requested page size. Server may return fewer items than requested. * If unspecified, server will pick an appropriate default. * </pre> * * <code>int32 page_size = 2;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * Requested page size. Server may return fewer items than requested. * If unspecified, server will pick an appropriate default. * </pre> * * <code>int32 page_size = 2;</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Requested page size. Server may return fewer items than requested. * If unspecified, server will pick an appropriate default. * </pre> * * <code>int32 page_size = 2;</code> * * @return This builder for chaining. */ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000002); pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. 
*/ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3;</code> * * @param value The pageToken to set. * @return This builder for chaining. */ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3;</code> * * @return This builder for chaining. */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3;</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. */ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private int scope_ = 0; /** * * * <pre> * Optional. The scope for which supported flags are requested. If not * specified, default is DATABASE. * </pre> * * <code> * .google.cloud.alloydb.v1beta.SupportedDatabaseFlag.Scope scope = 6 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The enum numeric value on the wire for scope. */ @java.lang.Override public int getScopeValue() { return scope_; } /** * * * <pre> * Optional. The scope for which supported flags are requested. 
If not * specified, default is DATABASE. * </pre> * * <code> * .google.cloud.alloydb.v1beta.SupportedDatabaseFlag.Scope scope = 6 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @param value The enum numeric value on the wire for scope to set. * @return This builder for chaining. */ public Builder setScopeValue(int value) { scope_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Optional. The scope for which supported flags are requested. If not * specified, default is DATABASE. * </pre> * * <code> * .google.cloud.alloydb.v1beta.SupportedDatabaseFlag.Scope scope = 6 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The scope. */ @java.lang.Override public com.google.cloud.alloydb.v1beta.SupportedDatabaseFlag.Scope getScope() { com.google.cloud.alloydb.v1beta.SupportedDatabaseFlag.Scope result = com.google.cloud.alloydb.v1beta.SupportedDatabaseFlag.Scope.forNumber(scope_); return result == null ? com.google.cloud.alloydb.v1beta.SupportedDatabaseFlag.Scope.UNRECOGNIZED : result; } /** * * * <pre> * Optional. The scope for which supported flags are requested. If not * specified, default is DATABASE. * </pre> * * <code> * .google.cloud.alloydb.v1beta.SupportedDatabaseFlag.Scope scope = 6 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @param value The scope to set. * @return This builder for chaining. */ public Builder setScope(com.google.cloud.alloydb.v1beta.SupportedDatabaseFlag.Scope value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000008; scope_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * Optional. The scope for which supported flags are requested. If not * specified, default is DATABASE. * </pre> * * <code> * .google.cloud.alloydb.v1beta.SupportedDatabaseFlag.Scope scope = 6 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return This builder for chaining. 
*/ public Builder clearScope() { bitField0_ = (bitField0_ & ~0x00000008); scope_ = 0; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest) } // @@protoc_insertion_point(class_scope:google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest) private static final com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest(); } public static com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListSupportedDatabaseFlagsRequest> PARSER = new com.google.protobuf.AbstractParser<ListSupportedDatabaseFlagsRequest>() { @java.lang.Override public ListSupportedDatabaseFlagsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static 
com.google.protobuf.Parser<ListSupportedDatabaseFlagsRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListSupportedDatabaseFlagsRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.alloydb.v1beta.ListSupportedDatabaseFlagsRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/poi
37,174
poi-ooxml/src/test/java/org/apache/poi/xwpf/usermodel/TestXWPFParagraph.java
/* ==================================================================== Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==================================================================== */ package org.apache.poi.xwpf.usermodel; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertInstanceOf; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.math.BigInteger; import java.util.List; import org.apache.poi.util.StringUtil; import org.apache.poi.xwpf.XWPFTestDataSamples; import org.junit.jupiter.api.Test; import org.openxmlformats.schemas.drawingml.x2006.picture.CTPicture; import org.openxmlformats.schemas.drawingml.x2006.picture.PicDocument; import org.openxmlformats.schemas.drawingml.x2006.picture.impl.PicDocumentImpl; import org.openxmlformats.schemas.officeDocument.x2006.sharedTypes.STOnOff1; import org.openxmlformats.schemas.wordprocessingml.x2006.main.CTBookmark; import org.openxmlformats.schemas.wordprocessingml.x2006.main.CTBorder; import 
org.openxmlformats.schemas.wordprocessingml.x2006.main.CTInd; import org.openxmlformats.schemas.wordprocessingml.x2006.main.CTJc; import org.openxmlformats.schemas.wordprocessingml.x2006.main.CTOnOff; import org.openxmlformats.schemas.wordprocessingml.x2006.main.CTP; import org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPBdr; import org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPPr; import org.openxmlformats.schemas.wordprocessingml.x2006.main.CTR; import org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSpacing; import org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTextAlignment; import org.openxmlformats.schemas.wordprocessingml.x2006.main.STBorder; import org.openxmlformats.schemas.wordprocessingml.x2006.main.STJc; import org.openxmlformats.schemas.wordprocessingml.x2006.main.STLineSpacingRule; import org.openxmlformats.schemas.wordprocessingml.x2006.main.STTextAlignment; /** * Tests for XWPF Paragraphs */ public final class TestXWPFParagraph { /** * Check that we get the right paragraph from the header */ @Test void testHeaderParagraph() throws IOException { try (XWPFDocument xml = XWPFTestDataSamples.openSampleDocument("ThreeColHead.docx")) { XWPFHeader hdr = xml.getHeaderFooterPolicy().getDefaultHeader(); assertNotNull(hdr); List<XWPFParagraph> ps = hdr.getParagraphs(); assertEquals(1, ps.size()); XWPFParagraph p = ps.get(0); assertEquals(5, p.getCTP().sizeOfRArray()); assertEquals("First header column!\tMid header\tRight header!", p.getText()); } } /** * Check that we get the right paragraphs from the document */ @Test void testDocumentParagraph() throws IOException { try (XWPFDocument xml = XWPFTestDataSamples.openSampleDocument("ThreeColHead.docx")) { List<XWPFParagraph> ps = xml.getParagraphs(); assertEquals(10, ps.size()); assertFalse(ps.get(0).isEmpty()); assertEquals( "This is a sample word document. It has two pages. 
It has a three column heading, but no footer.", ps.get(0).getText()); assertTrue(ps.get(1).isEmpty()); assertEquals("", ps.get(1).getText()); assertFalse(ps.get(2).isEmpty()); assertEquals("HEADING TEXT", ps.get(2).getText()); assertTrue(ps.get(3).isEmpty()); assertEquals("", ps.get(3).getText()); assertFalse(ps.get(4).isEmpty()); assertEquals("More on page one", ps.get(4).getText()); } } @Test void testSetGetBorderTop() throws IOException { //new clean instance of paragraph try (XWPFDocument doc = new XWPFDocument()) { XWPFParagraph p = doc.createParagraph(); assertEquals(STBorder.NONE.intValue(), p.getBorderTop().getValue()); CTP ctp = p.getCTP(); CTPPr ppr = ctp.getPPr() == null ? ctp.addNewPPr() : ctp.getPPr(); CTPBdr bdr = ppr.addNewPBdr(); CTBorder borderTop = bdr.addNewTop(); borderTop.setVal(STBorder.DOUBLE); bdr.setTop(borderTop); assertEquals(Borders.DOUBLE, p.getBorderTop()); p.setBorderTop(Borders.SINGLE); assertEquals(STBorder.SINGLE, borderTop.getVal()); } } @Test void testSetGetAlignment() throws IOException { //new clean instance of paragraph try (XWPFDocument doc = new XWPFDocument()) { XWPFParagraph p = doc.createParagraph(); assertEquals(STJc.LEFT.intValue(), p.getAlignment().getValue()); assertFalse(p.isAlignmentSet()); CTP ctp = p.getCTP(); CTPPr ppr = ctp.getPPr() == null ? ctp.addNewPPr() : ctp.getPPr(); CTJc align = ppr.addNewJc(); align.setVal(STJc.CENTER); assertEquals(ParagraphAlignment.CENTER, p.getAlignment()); assertTrue(p.isAlignmentSet()); p.setAlignment(ParagraphAlignment.BOTH); assertEquals(STJc.BOTH, ppr.getJc().getVal()); assertTrue(p.isAlignmentSet()); p.setAlignment(null); assertEquals(STJc.LEFT.intValue(), p.getAlignment().getValue()); assertFalse(p.isAlignmentSet()); } } @Test void testSetGetSpacing() throws IOException { try (XWPFDocument doc = new XWPFDocument()) { XWPFParagraph p = doc.createParagraph(); CTP ctp = p.getCTP(); CTPPr ppr = ctp.getPPr() == null ? 
ctp.addNewPPr() : ctp.getPPr(); assertEquals(-1, p.getSpacingBefore()); assertEquals(-1, p.getSpacingAfter()); assertEquals(-1, p.getSpacingBetween(), 0.1); assertEquals(LineSpacingRule.AUTO, p.getSpacingLineRule()); CTSpacing spacing = ppr.addNewSpacing(); spacing.setAfter(new BigInteger("10")); assertEquals(10, p.getSpacingAfter()); spacing.setBefore(new BigInteger("10")); assertEquals(10, p.getSpacingBefore()); p.setSpacingAfter(100); assertEquals("100", spacing.xgetAfter().getStringValue()); p.setSpacingBefore(100); assertEquals("100", spacing.xgetBefore().getStringValue()); p.setSpacingBetween(.25, LineSpacingRule.EXACT); assertEquals(.25, p.getSpacingBetween(), 0.01); assertEquals(LineSpacingRule.EXACT, p.getSpacingLineRule()); p.setSpacingBetween(1.25, LineSpacingRule.AUTO); assertEquals(1.25, p.getSpacingBetween(), 0.01); assertEquals(LineSpacingRule.AUTO, p.getSpacingLineRule()); p.setSpacingBetween(.5, LineSpacingRule.AT_LEAST); assertEquals(.5, p.getSpacingBetween(), 0.01); assertEquals(LineSpacingRule.AT_LEAST, p.getSpacingLineRule()); p.setSpacingBetween(1.15); assertEquals(1.15, p.getSpacingBetween(), 0.01); assertEquals(LineSpacingRule.AUTO, p.getSpacingLineRule()); } } @Test void testSetGetSpacingLineRule() throws IOException { try (XWPFDocument doc = new XWPFDocument()) { XWPFParagraph p = doc.createParagraph(); CTP ctp = p.getCTP(); CTPPr ppr = ctp.getPPr() == null ? 
ctp.addNewPPr() : ctp.getPPr(); assertEquals(STLineSpacingRule.INT_AUTO, p.getSpacingLineRule().getValue()); CTSpacing spacing = ppr.addNewSpacing(); spacing.setLineRule(STLineSpacingRule.AT_LEAST); assertEquals(LineSpacingRule.AT_LEAST, p.getSpacingLineRule()); p.setSpacingAfter(100); assertEquals("100", spacing.xgetAfter().getStringValue()); } } @Test void testSetGetIndentationChars() throws IOException { try (XWPFDocument doc = new XWPFDocument()) { XWPFParagraph p = doc.createParagraph(); assertEquals(-1, p.getIndentationLeftChars()); assertEquals(-1, p.getIndentationRightChars()); // set 1.5 characters p.setIndentationLeftChars(150); assertEquals(150, p.getIndentationLeftChars()); p.setIndentationRightChars(250); assertEquals(250, p.getIndentationRightChars()); } } @Test void testSetGetIndentation() throws IOException { try (XWPFDocument doc = new XWPFDocument()) { XWPFParagraph p = doc.createParagraph(); assertEquals(-1, p.getIndentationLeft()); CTP ctp = p.getCTP(); CTPPr ppr = ctp.getPPr() == null ? ctp.addNewPPr() : ctp.getPPr(); assertEquals(-1, p.getIndentationLeft()); CTInd ind = ppr.addNewInd(); ind.setLeft(new BigInteger("10")); assertEquals(10, p.getIndentationLeft()); p.setIndentationLeft(100); assertEquals("100", ind.xgetLeft().getStringValue()); } } @Test void testSetGetVerticalAlignment() throws IOException { //new clean instance of paragraph try (XWPFDocument doc = new XWPFDocument()) { XWPFParagraph p = doc.createParagraph(); CTP ctp = p.getCTP(); CTPPr ppr = ctp.getPPr() == null ? 
ctp.addNewPPr() : ctp.getPPr(); CTTextAlignment txtAlign = ppr.addNewTextAlignment(); txtAlign.setVal(STTextAlignment.CENTER); assertEquals(TextAlignment.CENTER, p.getVerticalAlignment()); p.setVerticalAlignment(TextAlignment.BOTTOM); assertEquals(STTextAlignment.BOTTOM, ppr.getTextAlignment().getVal()); } } @Test void testSetGetWordWrap() throws IOException { try (XWPFDocument doc = new XWPFDocument()) { XWPFParagraph p = doc.createParagraph(); CTP ctp = p.getCTP(); CTPPr ppr = ctp.getPPr() == null ? ctp.addNewPPr() : ctp.getPPr(); CTOnOff wordWrap = ppr.addNewWordWrap(); wordWrap.setVal(STOnOff1.OFF); assertFalse(p.isWordWrap()); p.setWordWrapped(true); assertEquals("on", ppr.getWordWrap().getVal()); } } @Test void testSetGetPageBreak() throws IOException { try (XWPFDocument doc = new XWPFDocument()) { XWPFParagraph p = doc.createParagraph(); CTP ctp = p.getCTP(); CTPPr ppr = ctp.getPPr() == null ? ctp.addNewPPr() : ctp.getPPr(); CTOnOff pageBreak = ppr.addNewPageBreakBefore(); pageBreak.setVal(STOnOff1.OFF); assertFalse(p.isPageBreak()); p.setPageBreak(true); assertEquals("on", ppr.getPageBreakBefore().getVal()); } } @Test void testBookmarks() throws IOException { try (XWPFDocument doc = XWPFTestDataSamples.openSampleDocument("bookmarks.docx")) { XWPFParagraph paragraph = doc.getParagraphs().get(0); assertEquals("Sample Word Document", paragraph.getText()); assertEquals(1, paragraph.getCTP().sizeOfBookmarkStartArray()); assertEquals(0, paragraph.getCTP().sizeOfBookmarkEndArray()); CTBookmark ctBookmark = paragraph.getCTP().getBookmarkStartArray(0); assertEquals("poi", ctBookmark.getName()); for (CTBookmark bookmark : paragraph.getCTP().getBookmarkStartList()) { assertEquals("poi", bookmark.getName()); } } } @Test void testGetSetNumID() throws IOException { try (XWPFDocument doc = new XWPFDocument()) { XWPFParagraph p = doc.createParagraph(); p.setNumID(new BigInteger("10")); assertEquals("10", p.getNumID().toString()); } } @Test void testGetSetILvl() throws 
IOException { try (XWPFDocument doc = new XWPFDocument()) { XWPFParagraph p = doc.createParagraph(); p.setNumILvl(new BigInteger("1")); assertEquals("1", p.getNumIlvl().toString()); } } @Test void testAddingRuns() throws IOException { try (XWPFDocument doc = XWPFTestDataSamples.openSampleDocument("sample.docx")) { XWPFParagraph p = doc.getParagraphs().get(0); assertEquals(2, p.getRuns().size()); assertEquals(2, p.getIRuns().size()); XWPFRun r = p.createRun(); assertEquals(3, p.getRuns().size()); assertEquals(2, p.getRuns().indexOf(r)); assertEquals(3, p.getIRuns().size()); assertEquals(2, p.getIRuns().indexOf(r)); XWPFRun r2 = p.insertNewRun(1); assertEquals(4, p.getRuns().size()); assertEquals(1, p.getRuns().indexOf(r2)); assertEquals(3, p.getRuns().indexOf(r)); assertEquals(4, p.getIRuns().size()); assertEquals(1, p.getIRuns().indexOf(r2)); assertEquals(3, p.getIRuns().indexOf(r)); } } @Test void testCreateNewRuns() throws IOException { try (XWPFDocument doc = new XWPFDocument()) { XWPFParagraph p = doc.createParagraph(); XWPFHyperlinkRun h = p.createHyperlinkRun("http://poi.apache.org"); XWPFFieldRun fieldRun = p.createFieldRun(); XWPFRun r = p.createRun(); assertEquals(3, p.getRuns().size()); assertEquals(0, p.getRuns().indexOf(h)); assertEquals(1, p.getRuns().indexOf(fieldRun)); assertEquals(2, p.getRuns().indexOf(r)); assertEquals(3, p.getIRuns().size()); assertEquals(0, p.getIRuns().indexOf(h)); assertEquals(1, p.getIRuns().indexOf(fieldRun)); assertEquals(2, p.getIRuns().indexOf(r)); } } @Test void testInsertNewRuns() throws IOException { try (XWPFDocument doc = new XWPFDocument()) { XWPFParagraph p = doc.createParagraph(); XWPFRun r = p.createRun(); assertEquals(1, p.getRuns().size()); assertEquals(0, p.getRuns().indexOf(r)); XWPFHyperlinkRun h = p.insertNewHyperlinkRun(0, "http://poi.apache.org"); assertEquals(2, p.getRuns().size()); assertEquals(0, p.getRuns().indexOf(h)); assertEquals(1, p.getRuns().indexOf(r)); XWPFFieldRun fieldRun2 = 
p.insertNewFieldRun(2); assertEquals(3, p.getRuns().size()); assertEquals(2, p.getRuns().indexOf(fieldRun2)); } } @Test void testRemoveRuns() throws IOException { try (XWPFDocument doc = new XWPFDocument()) { XWPFParagraph p = doc.createParagraph(); XWPFRun r = p.createRun(); p.createRun(); XWPFHyperlinkRun hyperlinkRun = p .createHyperlinkRun("http://poi.apache.org"); XWPFFieldRun fieldRun = p.createFieldRun(); assertEquals(4, p.getRuns().size()); assertEquals(2, p.getRuns().indexOf(hyperlinkRun)); assertEquals(3, p.getRuns().indexOf(fieldRun)); p.removeRun(2); assertEquals(3, p.getRuns().size()); assertEquals(-1, p.getRuns().indexOf(hyperlinkRun)); assertEquals(2, p.getRuns().indexOf(fieldRun)); p.removeRun(0); assertEquals(2, p.getRuns().size()); assertEquals(-1, p.getRuns().indexOf(r)); assertEquals(1, p.getRuns().indexOf(fieldRun)); p.removeRun(1); assertEquals(1, p.getRuns().size()); assertEquals(-1, p.getRuns().indexOf(fieldRun)); } } @Test void testRemoveAndInsertRunsWithOtherIRunElement() throws IOException { XWPFDocument doc = new XWPFDocument(); XWPFParagraph p = doc.createParagraph(); p.createRun(); // add other run element p.getCTP().addNewSdt(); // add two CTR in hyperlink XWPFHyperlinkRun hyperlinkRun = p .createHyperlinkRun("http://poi.apache.org"); hyperlinkRun.getCTHyperlink().addNewR(); p.createFieldRun(); XWPFDocument doc2 = XWPFTestDataSamples.writeOutAndReadBack(doc); XWPFParagraph paragraph = doc2.getParagraphArray(0); assertEquals(4, paragraph.getRuns().size()); assertEquals(5, paragraph.getIRuns().size()); assertInstanceOf(XWPFHyperlinkRun.class, paragraph.getRuns().get(1)); assertInstanceOf(XWPFHyperlinkRun.class, paragraph.getRuns().get(2)); assertInstanceOf(XWPFFieldRun.class, paragraph.getRuns().get(3)); assertInstanceOf(XWPFSDT.class, paragraph.getIRuns().get(1)); assertInstanceOf(XWPFHyperlinkRun.class, paragraph.getIRuns().get(2)); paragraph.removeRun(1); assertEquals(3, paragraph.getRuns().size()); 
assertInstanceOf(XWPFHyperlinkRun.class, paragraph.getRuns().get(1)); assertInstanceOf(XWPFFieldRun.class, paragraph.getRuns().get(2)); assertInstanceOf(XWPFSDT.class, paragraph.getIRuns().get(1)); assertInstanceOf(XWPFHyperlinkRun.class, paragraph.getIRuns().get(2)); paragraph.removeRun(1); assertEquals(2, paragraph.getRuns().size()); assertInstanceOf(XWPFFieldRun.class, paragraph.getRuns().get(1)); assertInstanceOf(XWPFSDT.class, paragraph.getIRuns().get(1)); assertInstanceOf(XWPFFieldRun.class, paragraph.getIRuns().get(2)); paragraph.removeRun(0); assertEquals(1, paragraph.getRuns().size()); assertInstanceOf(XWPFFieldRun.class, paragraph.getRuns().get(0)); assertInstanceOf(XWPFSDT.class, paragraph.getIRuns().get(0)); assertInstanceOf(XWPFFieldRun.class, paragraph.getIRuns().get(1)); XWPFRun newRun = paragraph.insertNewRun(0); assertEquals(2, paragraph.getRuns().size()); assertEquals(3, paragraph.getIRuns().size()); assertEquals(0, paragraph.getRuns().indexOf(newRun)); doc.close(); doc2.close(); } @Test void testPictures() throws IOException { try (XWPFDocument doc = XWPFTestDataSamples.openSampleDocument("VariousPictures.docx")) { assertEquals(7, doc.getParagraphs().size()); XWPFParagraph p; XWPFRun r; // Text paragraphs assertEquals("Sheet with various pictures", doc.getParagraphs().get(0).getText()); assertEquals("(jpeg, png, wmf, emf and pict) ", doc.getParagraphs().get(1).getText()); // Spacer ones assertEquals("", doc.getParagraphs().get(2).getText()); assertEquals("", doc.getParagraphs().get(3).getText()); assertEquals("", doc.getParagraphs().get(4).getText()); // Image one p = doc.getParagraphs().get(5); assertEquals(6, p.getRuns().size()); r = p.getRuns().get(0); assertEquals("", r.toString()); assertEquals(1, r.getEmbeddedPictures().size()); assertNotNull(r.getEmbeddedPictures().get(0).getPictureData()); assertEquals("image1.wmf", r.getEmbeddedPictures().get(0).getPictureData().getFileName()); r = p.getRuns().get(1); assertEquals("", r.toString()); 
assertEquals(1, r.getEmbeddedPictures().size()); assertNotNull(r.getEmbeddedPictures().get(0).getPictureData()); assertEquals("image2.png", r.getEmbeddedPictures().get(0).getPictureData().getFileName()); r = p.getRuns().get(2); assertEquals("", r.toString()); assertEquals(1, r.getEmbeddedPictures().size()); assertNotNull(r.getEmbeddedPictures().get(0).getPictureData()); assertEquals("image3.emf", r.getEmbeddedPictures().get(0).getPictureData().getFileName()); r = p.getRuns().get(3); assertEquals("", r.toString()); assertEquals(1, r.getEmbeddedPictures().size()); assertNotNull(r.getEmbeddedPictures().get(0).getPictureData()); assertEquals("image4.emf", r.getEmbeddedPictures().get(0).getPictureData().getFileName()); r = p.getRuns().get(4); assertEquals("", r.toString()); assertEquals(1, r.getEmbeddedPictures().size()); assertNotNull(r.getEmbeddedPictures().get(0).getPictureData()); assertEquals("image5.jpeg", r.getEmbeddedPictures().get(0).getPictureData().getFileName()); r = p.getRuns().get(5); assertEquals(" ", r.toString()); assertEquals(0, r.getEmbeddedPictures().size()); // Final spacer assertEquals("", doc.getParagraphs().get(6).getText()); // Look in detail at one r = p.getRuns().get(4); XWPFPicture pict = r.getEmbeddedPictures().get(0); CTPicture picture = pict.getCTPicture(); assertEquals("rId8", picture.getBlipFill().getBlip().getEmbed()); // Ensure that the ooxml compiler finds everything we need r.getCTR().getDrawingArray(0); r.getCTR().getDrawingArray(0).getInlineArray(0); r.getCTR().getDrawingArray(0).getInlineArray(0).getGraphic(); r.getCTR().getDrawingArray(0).getInlineArray(0).getGraphic().getGraphicData(); PicDocument pd = new PicDocumentImpl(null); assertTrue(pd.isNil()); } } @Test void testTika792() throws Exception { //This test forces the loading of CTMoveBookmark and //CTMoveBookmarkImpl into ooxml-lite. 
try (XWPFDocument doc = XWPFTestDataSamples.openSampleDocument("Tika-792.docx")) { XWPFParagraph paragraph = doc.getParagraphs().get(0); assertEquals("", paragraph.getText()); paragraph = doc.getParagraphs().get(1); assertEquals("b", paragraph.getText()); } } @Test void testSettersGetters() throws IOException { try (XWPFDocument doc = new XWPFDocument()) { XWPFParagraph p = doc.createParagraph(); assertTrue(p.isEmpty()); assertFalse(p.removeRun(0)); p.setBorderTop(Borders.BABY_PACIFIER); p.setBorderBetween(Borders.BABY_PACIFIER); p.setBorderBottom(Borders.BABY_RATTLE); assertNotNull(p.getIRuns()); assertEquals(0, p.getIRuns().size()); assertFalse(p.isEmpty()); assertNull(p.getStyleID()); assertNull(p.getStyle()); assertNull(p.getNumID()); p.setNumID(BigInteger.valueOf(12)); assertEquals(BigInteger.valueOf(12), p.getNumID()); p.setNumID(BigInteger.valueOf(13)); assertEquals(BigInteger.valueOf(13), p.getNumID()); assertNull(p.getNumFmt()); assertNull(p.getNumIlvl()); assertEquals("", p.getParagraphText()); assertEquals("", p.getPictureText()); assertEquals("", p.getFootnoteText()); p.setBorderBetween(Borders.NONE); assertEquals(Borders.NONE, p.getBorderBetween()); p.setBorderBetween(Borders.BASIC_BLACK_DASHES); assertEquals(Borders.BASIC_BLACK_DASHES, p.getBorderBetween()); p.setBorderBottom(Borders.NONE); assertEquals(Borders.NONE, p.getBorderBottom()); p.setBorderBottom(Borders.BABY_RATTLE); assertEquals(Borders.BABY_RATTLE, p.getBorderBottom()); p.setBorderLeft(Borders.NONE); assertEquals(Borders.NONE, p.getBorderLeft()); p.setBorderLeft(Borders.BASIC_WHITE_SQUARES); assertEquals(Borders.BASIC_WHITE_SQUARES, p.getBorderLeft()); p.setBorderRight(Borders.NONE); assertEquals(Borders.NONE, p.getBorderRight()); p.setBorderRight(Borders.BASIC_WHITE_DASHES); assertEquals(Borders.BASIC_WHITE_DASHES, p.getBorderRight()); p.setBorderBottom(Borders.NONE); assertEquals(Borders.NONE, p.getBorderBottom()); p.setBorderBottom(Borders.BASIC_WHITE_DOTS); 
assertEquals(Borders.BASIC_WHITE_DOTS, p.getBorderBottom()); assertFalse(p.isPageBreak()); p.setPageBreak(true); assertTrue(p.isPageBreak()); p.setPageBreak(false); assertFalse(p.isPageBreak()); assertFalse(p.isKeepNext()); p.setKeepNext(true); assertTrue(p.isKeepNext()); p.setKeepNext(false); assertFalse(p.isKeepNext()); assertEquals(-1, p.getSpacingAfter()); p.setSpacingAfter(12); assertEquals(12, p.getSpacingAfter()); assertEquals(-1, p.getSpacingAfterLines()); p.setSpacingAfterLines(14); assertEquals(14, p.getSpacingAfterLines()); assertEquals(-1, p.getSpacingBefore()); p.setSpacingBefore(16); assertEquals(16, p.getSpacingBefore()); assertEquals(-1, p.getSpacingBeforeLines()); p.setSpacingBeforeLines(18); assertEquals(18, p.getSpacingBeforeLines()); assertEquals(LineSpacingRule.AUTO, p.getSpacingLineRule()); p.setSpacingLineRule(LineSpacingRule.EXACT); assertEquals(LineSpacingRule.EXACT, p.getSpacingLineRule()); assertEquals(-1, p.getIndentationLeft()); p.setIndentationLeft(21); assertEquals(21, p.getIndentationLeft()); assertEquals(-1, p.getIndentationRight()); p.setIndentationRight(25); assertEquals(25, p.getIndentationRight()); assertEquals(-1, p.getIndentationHanging()); p.setIndentationHanging(25); assertEquals(25, p.getIndentationHanging()); assertEquals(-1, p.getIndentationFirstLine()); p.setIndentationFirstLine(25); assertEquals(25, p.getIndentationFirstLine()); assertFalse(p.isWordWrap()); p.setWordWrapped(true); assertTrue(p.isWordWrap()); p.setWordWrapped(false); assertFalse(p.isWordWrap()); assertNull(p.getStyle()); p.setStyle("testStyle"); assertEquals("testStyle", p.getStyle()); p.addRun(CTR.Factory.newInstance()); //assertTrue(p.removeRun(0)); assertNotNull(p.getBody()); assertEquals(BodyElementType.PARAGRAPH, p.getElementType()); assertEquals(BodyType.DOCUMENT, p.getPartType()); } } @Test void testSearchTextNotFound() throws IOException { try (XWPFDocument doc = new XWPFDocument()) { XWPFParagraph p = doc.createParagraph(); 
assertNull(p.searchText("test", new PositionInParagraph())); assertEquals("", p.getText()); } } @Test void testSearchTextFound() throws IOException { try (XWPFDocument xml = XWPFTestDataSamples.openSampleDocument("ThreeColHead.docx")) { List<XWPFParagraph> ps = xml.getParagraphs(); assertEquals(10, ps.size()); XWPFParagraph p = ps.get(0); TextSegment segment = p.searchText("sample word document", new PositionInParagraph()); assertNotNull(segment); assertEquals("sample word document", p.getText(segment)); assertTrue(p.removeRun(0)); } } @Test void testFieldRuns() throws IOException { try (XWPFDocument doc = XWPFTestDataSamples.openSampleDocument("FldSimple.docx")) { List<XWPFParagraph> ps = doc.getParagraphs(); assertEquals(1, ps.size()); XWPFParagraph p = ps.get(0); assertEquals(1, p.getRuns().size()); assertEquals(1, p.getIRuns().size()); XWPFRun r = p.getRuns().get(0); assertEquals(XWPFFieldRun.class, r.getClass()); XWPFFieldRun fr = (XWPFFieldRun) r; assertEquals(" FILENAME \\* MERGEFORMAT ", fr.getFieldInstruction()); assertEquals("FldSimple.docx", fr.text()); assertEquals("FldSimple.docx", p.getText()); } } @SuppressWarnings("deprecation") @Test void testRuns() throws IOException { try (XWPFDocument doc = new XWPFDocument()) { XWPFParagraph p = doc.createParagraph(); CTR run = CTR.Factory.newInstance(); XWPFRun r = new XWPFRun(run, doc.createParagraph()); p.addRun(r); p.addRun(r); assertNotNull(p.getRun(run)); assertNull(p.getRun(null)); } } @Test void test58067() throws IOException { try (XWPFDocument doc = XWPFTestDataSamples.openSampleDocument("58067.docx")) { StringBuilder str = new StringBuilder(); for (XWPFParagraph par : doc.getParagraphs()) { str.append(par.getText()).append("\n"); } assertEquals("This is a test.\n\n\n\n3\n4\n5\n\n\n\nThis is a whole paragraph where one word is deleted.\n", str.toString()); } } @Test void test61787() throws IOException { try (XWPFDocument doc = XWPFTestDataSamples.openSampleDocument("61787.docx")) { StringBuilder str = 
new StringBuilder(); for (XWPFParagraph par : doc.getParagraphs()) { str.append(par.getText()).append("\n"); } String s = str.toString(); assertTrue(StringUtil.isNotBlank(s), "Having text: \n" + s + "\nTrimmed length: " + s.trim().length()); } } /** * Tests for numbered lists * * See also https://github.com/jimklo/apache-poi-sample/blob/master/src/main/java/com/sri/jklo/StyledDocument.java * for someone else trying a similar thing */ @Test void testNumberedLists() throws Exception { try (XWPFDocument doc = XWPFTestDataSamples.openSampleDocument("ComplexNumberedLists.docx")) { XWPFParagraph p; p = doc.getParagraphArray(0); assertEquals("This is a document with numbered lists", p.getText()); assertNull(p.getNumID()); assertNull(p.getNumIlvl()); assertNull(p.getNumStartOverride()); p = doc.getParagraphArray(1); assertEquals("Entry #1", p.getText()); assertEquals(BigInteger.valueOf(1), p.getNumID()); assertEquals(BigInteger.valueOf(0), p.getNumIlvl()); assertNull(p.getNumStartOverride()); p = doc.getParagraphArray(2); assertEquals("Entry #2, with children", p.getText()); assertEquals(BigInteger.valueOf(1), p.getNumID()); assertEquals(BigInteger.valueOf(0), p.getNumIlvl()); assertNull(p.getNumStartOverride()); p = doc.getParagraphArray(3); assertEquals("2-a", p.getText()); assertEquals(BigInteger.valueOf(1), p.getNumID()); assertEquals(BigInteger.valueOf(1), p.getNumIlvl()); assertNull(p.getNumStartOverride()); p = doc.getParagraphArray(4); assertEquals("2-b", p.getText()); assertEquals(BigInteger.valueOf(1), p.getNumID()); assertEquals(BigInteger.valueOf(1), p.getNumIlvl()); assertNull(p.getNumStartOverride()); p = doc.getParagraphArray(5); assertEquals("2-c", p.getText()); assertEquals(BigInteger.valueOf(1), p.getNumID()); assertEquals(BigInteger.valueOf(1), p.getNumIlvl()); assertNull(p.getNumStartOverride()); p = doc.getParagraphArray(6); assertEquals("Entry #3", p.getText()); assertEquals(BigInteger.valueOf(1), p.getNumID()); assertEquals(BigInteger.valueOf(0), 
p.getNumIlvl()); assertNull(p.getNumStartOverride()); p = doc.getParagraphArray(7); assertEquals("Entry #4", p.getText()); assertEquals(BigInteger.valueOf(1), p.getNumID()); assertEquals(BigInteger.valueOf(0), p.getNumIlvl()); assertNull(p.getNumStartOverride()); // New list p = doc.getParagraphArray(8); assertEquals("Restarted to 1 from 5", p.getText()); assertEquals(BigInteger.valueOf(2), p.getNumID()); assertEquals(BigInteger.valueOf(0), p.getNumIlvl()); assertNull(p.getNumStartOverride()); p = doc.getParagraphArray(9); assertEquals("Restarted @ 2", p.getText()); assertEquals(BigInteger.valueOf(2), p.getNumID()); assertEquals(BigInteger.valueOf(0), p.getNumIlvl()); assertNull(p.getNumStartOverride()); p = doc.getParagraphArray(10); assertEquals("Restarted @ 3", p.getText()); assertEquals(BigInteger.valueOf(2), p.getNumID()); assertEquals(BigInteger.valueOf(0), p.getNumIlvl()); assertNull(p.getNumStartOverride()); // New list starting at 10 p = doc.getParagraphArray(11); assertEquals("Jump to new list at 10", p.getText()); assertEquals(BigInteger.valueOf(6), p.getNumID()); assertEquals(BigInteger.valueOf(0), p.getNumIlvl()); // TODO Why isn't this seen as 10? assertNull(p.getNumStartOverride()); // TODO Shouldn't we use XWPFNumbering or similar here? 
// TODO Make it easier to change } } @Test void testRunIsEmpty() throws Exception { try (XWPFDocument doc = new XWPFDocument()) { XWPFParagraph p = doc.createParagraph(); assertTrue(p.runsIsEmpty()); p.createRun().setText("abc"); assertFalse(p.runsIsEmpty()); } } @Test void testSearchText() throws Exception { try (XWPFDocument doc = new XWPFDocument()) { XWPFParagraph paragraph = doc.createParagraph(); paragraph.createRun().setText("abc"); paragraph.createRun().setText("de"); paragraph.createRun().setText("f"); paragraph.createRun().setText("g"); checkSearchText(paragraph, "a", 0, 0, 0, 0, 0, 0); checkSearchText(paragraph, "b", 0, 0, 0, 0, 1, 1); checkSearchText(paragraph, "ab", 0, 0, 0, 0, 0, 1); checkSearchText(paragraph, "abc", 0, 0, 0, 0, 0, 2); checkSearchText(paragraph, "abcd", 0, 1, 0, 0, 0, 0); checkSearchText(paragraph, "abcde", 0, 1, 0, 0, 0, 1); checkSearchText(paragraph, "bcde", 0, 1, 0, 0, 1, 1); checkSearchText(paragraph, "bcdef", 0, 2, 0, 0, 1, 0); checkSearchText(paragraph, "bcdefg", 0, 3, 0, 0, 1, 0); checkSearchText(paragraph, "cdefg", 0, 3, 0, 0, 2, 0); checkSearchText(paragraph, "defg", 1, 3, 0, 0, 0, 0); checkSearchText(paragraph, "d", 1, 1, 0, 0, 0, 0); checkSearchText(paragraph, "de", 1, 1, 0, 0, 0, 1); checkSearchText(paragraph, "ef", 1, 2, 0, 0, 1, 0); checkSearchText(paragraph, "f", 2, 2, 0, 0, 0, 0); checkSearchText(paragraph, "fg", 2, 3, 0, 0, 0, 0); checkSearchText(paragraph, "g", 3, 3, 0, 0, 0, 0); } } private static void checkSearchText(XWPFParagraph paragraph, String search, int beginRun, int endRun, int beginText, int endText, int beginChar, int endChar) { TextSegment result = paragraph.searchText(search, new PositionInParagraph()); assertEquals(beginRun, result.getBeginRun(), "beginRun"); assertEquals(endRun, result.getEndRun(), "endRun"); assertEquals(beginText, result.getBeginText(), "beginText"); assertEquals(endText, result.getEndText(), "endText"); assertEquals(beginChar, result.getBeginChar(), "beginChar"); 
assertEquals(endChar, result.getEndChar(), "endChar"); } }
googleapis/google-cloud-java
37,046
java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/MetricxSpec.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1/evaluation_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1; /** * * * <pre> * Spec for MetricX metric. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1.MetricxSpec} */ public final class MetricxSpec extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.MetricxSpec) MetricxSpecOrBuilder { private static final long serialVersionUID = 0L; // Use MetricxSpec.newBuilder() to construct. 
private MetricxSpec(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private MetricxSpec() { version_ = 0; sourceLanguage_ = ""; targetLanguage_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new MetricxSpec(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.EvaluationServiceProto .internal_static_google_cloud_aiplatform_v1_MetricxSpec_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.EvaluationServiceProto .internal_static_google_cloud_aiplatform_v1_MetricxSpec_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.MetricxSpec.class, com.google.cloud.aiplatform.v1.MetricxSpec.Builder.class); } /** * * * <pre> * MetricX Version options. * </pre> * * Protobuf enum {@code google.cloud.aiplatform.v1.MetricxSpec.MetricxVersion} */ public enum MetricxVersion implements com.google.protobuf.ProtocolMessageEnum { /** * * * <pre> * MetricX version unspecified. * </pre> * * <code>METRICX_VERSION_UNSPECIFIED = 0;</code> */ METRICX_VERSION_UNSPECIFIED(0), /** * * * <pre> * MetricX 2024 (2.6) for translation + reference (reference-based). * </pre> * * <code>METRICX_24_REF = 1;</code> */ METRICX_24_REF(1), /** * * * <pre> * MetricX 2024 (2.6) for translation + source (QE). * </pre> * * <code>METRICX_24_SRC = 2;</code> */ METRICX_24_SRC(2), /** * * * <pre> * MetricX 2024 (2.6) for translation + source + reference * (source-reference-combined). * </pre> * * <code>METRICX_24_SRC_REF = 3;</code> */ METRICX_24_SRC_REF(3), UNRECOGNIZED(-1), ; /** * * * <pre> * MetricX version unspecified. 
* </pre> * * <code>METRICX_VERSION_UNSPECIFIED = 0;</code> */ public static final int METRICX_VERSION_UNSPECIFIED_VALUE = 0; /** * * * <pre> * MetricX 2024 (2.6) for translation + reference (reference-based). * </pre> * * <code>METRICX_24_REF = 1;</code> */ public static final int METRICX_24_REF_VALUE = 1; /** * * * <pre> * MetricX 2024 (2.6) for translation + source (QE). * </pre> * * <code>METRICX_24_SRC = 2;</code> */ public static final int METRICX_24_SRC_VALUE = 2; /** * * * <pre> * MetricX 2024 (2.6) for translation + source + reference * (source-reference-combined). * </pre> * * <code>METRICX_24_SRC_REF = 3;</code> */ public static final int METRICX_24_SRC_REF_VALUE = 3; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static MetricxVersion valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. 
*/ public static MetricxVersion forNumber(int value) { switch (value) { case 0: return METRICX_VERSION_UNSPECIFIED; case 1: return METRICX_24_REF; case 2: return METRICX_24_SRC; case 3: return METRICX_24_SRC_REF; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<MetricxVersion> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap<MetricxVersion> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<MetricxVersion>() { public MetricxVersion findValueByNumber(int number) { return MetricxVersion.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.cloud.aiplatform.v1.MetricxSpec.getDescriptor().getEnumTypes().get(0); } private static final MetricxVersion[] VALUES = values(); public static MetricxVersion valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private MetricxVersion(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.cloud.aiplatform.v1.MetricxSpec.MetricxVersion) } private int bitField0_; public static final int VERSION_FIELD_NUMBER = 1; private int version_ = 0; /** * * * <pre> * Required. Which version to use for evaluation. 
* </pre> * * <code> * optional .google.cloud.aiplatform.v1.MetricxSpec.MetricxVersion version = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the version field is set. */ @java.lang.Override public boolean hasVersion() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. Which version to use for evaluation. * </pre> * * <code> * optional .google.cloud.aiplatform.v1.MetricxSpec.MetricxVersion version = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The enum numeric value on the wire for version. */ @java.lang.Override public int getVersionValue() { return version_; } /** * * * <pre> * Required. Which version to use for evaluation. * </pre> * * <code> * optional .google.cloud.aiplatform.v1.MetricxSpec.MetricxVersion version = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The version. */ @java.lang.Override public com.google.cloud.aiplatform.v1.MetricxSpec.MetricxVersion getVersion() { com.google.cloud.aiplatform.v1.MetricxSpec.MetricxVersion result = com.google.cloud.aiplatform.v1.MetricxSpec.MetricxVersion.forNumber(version_); return result == null ? com.google.cloud.aiplatform.v1.MetricxSpec.MetricxVersion.UNRECOGNIZED : result; } public static final int SOURCE_LANGUAGE_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object sourceLanguage_ = ""; /** * * * <pre> * Optional. Source language in BCP-47 format. * </pre> * * <code>string source_language = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The sourceLanguage. */ @java.lang.Override public java.lang.String getSourceLanguage() { java.lang.Object ref = sourceLanguage_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); sourceLanguage_ = s; return s; } } /** * * * <pre> * Optional. Source language in BCP-47 format. 
* </pre> * * <code>string source_language = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for sourceLanguage. */ @java.lang.Override public com.google.protobuf.ByteString getSourceLanguageBytes() { java.lang.Object ref = sourceLanguage_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); sourceLanguage_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int TARGET_LANGUAGE_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object targetLanguage_ = ""; /** * * * <pre> * Optional. Target language in BCP-47 format. Covers both prediction and * reference. * </pre> * * <code>string target_language = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The targetLanguage. */ @java.lang.Override public java.lang.String getTargetLanguage() { java.lang.Object ref = targetLanguage_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); targetLanguage_ = s; return s; } } /** * * * <pre> * Optional. Target language in BCP-47 format. Covers both prediction and * reference. * </pre> * * <code>string target_language = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for targetLanguage. 
*/ @java.lang.Override public com.google.protobuf.ByteString getTargetLanguageBytes() { java.lang.Object ref = targetLanguage_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); targetLanguage_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeEnum(1, version_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sourceLanguage_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, sourceLanguage_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(targetLanguage_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, targetLanguage_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, version_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sourceLanguage_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, sourceLanguage_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(targetLanguage_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, targetLanguage_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1.MetricxSpec)) { return 
super.equals(obj); } com.google.cloud.aiplatform.v1.MetricxSpec other = (com.google.cloud.aiplatform.v1.MetricxSpec) obj; if (hasVersion() != other.hasVersion()) return false; if (hasVersion()) { if (version_ != other.version_) return false; } if (!getSourceLanguage().equals(other.getSourceLanguage())) return false; if (!getTargetLanguage().equals(other.getTargetLanguage())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasVersion()) { hash = (37 * hash) + VERSION_FIELD_NUMBER; hash = (53 * hash) + version_; } hash = (37 * hash) + SOURCE_LANGUAGE_FIELD_NUMBER; hash = (53 * hash) + getSourceLanguage().hashCode(); hash = (37 * hash) + TARGET_LANGUAGE_FIELD_NUMBER; hash = (53 * hash) + getTargetLanguage().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1.MetricxSpec parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.MetricxSpec parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.MetricxSpec parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.MetricxSpec parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.aiplatform.v1.MetricxSpec parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.MetricxSpec parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.MetricxSpec parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.MetricxSpec parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.MetricxSpec parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.MetricxSpec parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.MetricxSpec parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.MetricxSpec parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override 
public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.aiplatform.v1.MetricxSpec prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Spec for MetricX metric. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1.MetricxSpec} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.MetricxSpec) com.google.cloud.aiplatform.v1.MetricxSpecOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.EvaluationServiceProto .internal_static_google_cloud_aiplatform_v1_MetricxSpec_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.EvaluationServiceProto .internal_static_google_cloud_aiplatform_v1_MetricxSpec_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.MetricxSpec.class, com.google.cloud.aiplatform.v1.MetricxSpec.Builder.class); } // Construct using com.google.cloud.aiplatform.v1.MetricxSpec.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; version_ = 0; sourceLanguage_ = ""; targetLanguage_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor 
getDescriptorForType() { return com.google.cloud.aiplatform.v1.EvaluationServiceProto .internal_static_google_cloud_aiplatform_v1_MetricxSpec_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1.MetricxSpec getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1.MetricxSpec.getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1.MetricxSpec build() { com.google.cloud.aiplatform.v1.MetricxSpec result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1.MetricxSpec buildPartial() { com.google.cloud.aiplatform.v1.MetricxSpec result = new com.google.cloud.aiplatform.v1.MetricxSpec(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.aiplatform.v1.MetricxSpec result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.version_ = version_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.sourceLanguage_ = sourceLanguage_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.targetLanguage_ = targetLanguage_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, 
value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1.MetricxSpec) { return mergeFrom((com.google.cloud.aiplatform.v1.MetricxSpec) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.aiplatform.v1.MetricxSpec other) { if (other == com.google.cloud.aiplatform.v1.MetricxSpec.getDefaultInstance()) return this; if (other.hasVersion()) { setVersion(other.getVersion()); } if (!other.getSourceLanguage().isEmpty()) { sourceLanguage_ = other.sourceLanguage_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getTargetLanguage().isEmpty()) { targetLanguage_ = other.targetLanguage_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { version_ = input.readEnum(); bitField0_ |= 0x00000001; break; } // case 8 case 18: { sourceLanguage_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { targetLanguage_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { 
throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private int version_ = 0; /** * * * <pre> * Required. Which version to use for evaluation. * </pre> * * <code> * optional .google.cloud.aiplatform.v1.MetricxSpec.MetricxVersion version = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the version field is set. */ @java.lang.Override public boolean hasVersion() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. Which version to use for evaluation. * </pre> * * <code> * optional .google.cloud.aiplatform.v1.MetricxSpec.MetricxVersion version = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The enum numeric value on the wire for version. */ @java.lang.Override public int getVersionValue() { return version_; } /** * * * <pre> * Required. Which version to use for evaluation. * </pre> * * <code> * optional .google.cloud.aiplatform.v1.MetricxSpec.MetricxVersion version = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @param value The enum numeric value on the wire for version to set. * @return This builder for chaining. */ public Builder setVersionValue(int value) { version_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Which version to use for evaluation. * </pre> * * <code> * optional .google.cloud.aiplatform.v1.MetricxSpec.MetricxVersion version = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The version. */ @java.lang.Override public com.google.cloud.aiplatform.v1.MetricxSpec.MetricxVersion getVersion() { com.google.cloud.aiplatform.v1.MetricxSpec.MetricxVersion result = com.google.cloud.aiplatform.v1.MetricxSpec.MetricxVersion.forNumber(version_); return result == null ? com.google.cloud.aiplatform.v1.MetricxSpec.MetricxVersion.UNRECOGNIZED : result; } /** * * * <pre> * Required. Which version to use for evaluation. 
* </pre> * * <code> * optional .google.cloud.aiplatform.v1.MetricxSpec.MetricxVersion version = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @param value The version to set. * @return This builder for chaining. */ public Builder setVersion(com.google.cloud.aiplatform.v1.MetricxSpec.MetricxVersion value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; version_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * Required. Which version to use for evaluation. * </pre> * * <code> * optional .google.cloud.aiplatform.v1.MetricxSpec.MetricxVersion version = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return This builder for chaining. */ public Builder clearVersion() { bitField0_ = (bitField0_ & ~0x00000001); version_ = 0; onChanged(); return this; } private java.lang.Object sourceLanguage_ = ""; /** * * * <pre> * Optional. Source language in BCP-47 format. * </pre> * * <code>string source_language = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The sourceLanguage. */ public java.lang.String getSourceLanguage() { java.lang.Object ref = sourceLanguage_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); sourceLanguage_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. Source language in BCP-47 format. * </pre> * * <code>string source_language = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for sourceLanguage. */ public com.google.protobuf.ByteString getSourceLanguageBytes() { java.lang.Object ref = sourceLanguage_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); sourceLanguage_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. Source language in BCP-47 format. 
* </pre> * * <code>string source_language = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The sourceLanguage to set. * @return This builder for chaining. */ public Builder setSourceLanguage(java.lang.String value) { if (value == null) { throw new NullPointerException(); } sourceLanguage_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. Source language in BCP-47 format. * </pre> * * <code>string source_language = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearSourceLanguage() { sourceLanguage_ = getDefaultInstance().getSourceLanguage(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Optional. Source language in BCP-47 format. * </pre> * * <code>string source_language = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for sourceLanguage to set. * @return This builder for chaining. */ public Builder setSourceLanguageBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); sourceLanguage_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object targetLanguage_ = ""; /** * * * <pre> * Optional. Target language in BCP-47 format. Covers both prediction and * reference. * </pre> * * <code>string target_language = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The targetLanguage. */ public java.lang.String getTargetLanguage() { java.lang.Object ref = targetLanguage_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); targetLanguage_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. Target language in BCP-47 format. Covers both prediction and * reference. 
* </pre> * * <code>string target_language = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for targetLanguage. */ public com.google.protobuf.ByteString getTargetLanguageBytes() { java.lang.Object ref = targetLanguage_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); targetLanguage_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. Target language in BCP-47 format. Covers both prediction and * reference. * </pre> * * <code>string target_language = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The targetLanguage to set. * @return This builder for chaining. */ public Builder setTargetLanguage(java.lang.String value) { if (value == null) { throw new NullPointerException(); } targetLanguage_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. Target language in BCP-47 format. Covers both prediction and * reference. * </pre> * * <code>string target_language = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearTargetLanguage() { targetLanguage_ = getDefaultInstance().getTargetLanguage(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Optional. Target language in BCP-47 format. Covers both prediction and * reference. * </pre> * * <code>string target_language = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for targetLanguage to set. * @return This builder for chaining. 
*/ public Builder setTargetLanguageBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); targetLanguage_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.MetricxSpec) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.MetricxSpec) private static final com.google.cloud.aiplatform.v1.MetricxSpec DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.MetricxSpec(); } public static com.google.cloud.aiplatform.v1.MetricxSpec getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<MetricxSpec> PARSER = new com.google.protobuf.AbstractParser<MetricxSpec>() { @java.lang.Override public MetricxSpec parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<MetricxSpec> parser() { return PARSER; } 
@java.lang.Override public com.google.protobuf.Parser<MetricxSpec> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1.MetricxSpec getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,427
java-iam/google-iam-policy/src/main/java/com/google/iam/v3beta/stub/PolicyBindingsStubSettings.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.iam.v3beta.stub; import static com.google.iam.v3beta.PolicyBindingsClient.ListPolicyBindingsPagedResponse; import static com.google.iam.v3beta.PolicyBindingsClient.SearchTargetPolicyBindingsPagedResponse; import com.google.api.core.ApiFunction; import com.google.api.core.ApiFuture; import com.google.api.core.BetaApi; import com.google.api.core.ObsoleteApi; import com.google.api.gax.core.GaxProperties; import com.google.api.gax.core.GoogleCredentialsProvider; import com.google.api.gax.core.InstantiatingExecutorProvider; import com.google.api.gax.grpc.GaxGrpcProperties; import com.google.api.gax.grpc.GrpcTransportChannel; import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider; import com.google.api.gax.grpc.ProtoOperationTransformers; import com.google.api.gax.httpjson.GaxHttpJsonProperties; import com.google.api.gax.httpjson.HttpJsonTransportChannel; import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider; import com.google.api.gax.longrunning.OperationSnapshot; import com.google.api.gax.longrunning.OperationTimedPollAlgorithm; import com.google.api.gax.retrying.RetrySettings; import com.google.api.gax.rpc.ApiCallContext; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.OperationCallSettings; import com.google.api.gax.rpc.PageContext; import 
com.google.api.gax.rpc.PagedCallSettings; import com.google.api.gax.rpc.PagedListDescriptor; import com.google.api.gax.rpc.PagedListResponseFactory; import com.google.api.gax.rpc.StatusCode; import com.google.api.gax.rpc.StubSettings; import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.iam.v3beta.CreatePolicyBindingRequest; import com.google.iam.v3beta.DeletePolicyBindingRequest; import com.google.iam.v3beta.GetPolicyBindingRequest; import com.google.iam.v3beta.ListPolicyBindingsRequest; import com.google.iam.v3beta.ListPolicyBindingsResponse; import com.google.iam.v3beta.OperationMetadata; import com.google.iam.v3beta.PolicyBinding; import com.google.iam.v3beta.SearchTargetPolicyBindingsRequest; import com.google.iam.v3beta.SearchTargetPolicyBindingsResponse; import com.google.iam.v3beta.UpdatePolicyBindingRequest; import com.google.longrunning.Operation; import com.google.protobuf.Empty; import java.io.IOException; import java.time.Duration; import java.util.List; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link PolicyBindingsStub}. * * <p>The default instance has everything set to sensible defaults: * * <ul> * <li>The default service address (iam.googleapis.com) and default port (443) are used. * <li>Credentials are acquired automatically through Application Default Credentials. * <li>Retries are configured for idempotent methods but not for non-idempotent methods. * </ul> * * <p>The builder of this class is recursive, so contained classes are themselves builders. When * build() is called, the tree of builders is called to create the complete settings object. 
* * <p>For example, to set the * [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings) * of getPolicyBinding: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * PolicyBindingsStubSettings.Builder policyBindingsSettingsBuilder = * PolicyBindingsStubSettings.newBuilder(); * policyBindingsSettingsBuilder * .getPolicyBindingSettings() * .setRetrySettings( * policyBindingsSettingsBuilder * .getPolicyBindingSettings() * .getRetrySettings() * .toBuilder() * .setInitialRetryDelayDuration(Duration.ofSeconds(1)) * .setInitialRpcTimeoutDuration(Duration.ofSeconds(5)) * .setMaxAttempts(5) * .setMaxRetryDelayDuration(Duration.ofSeconds(30)) * .setMaxRpcTimeoutDuration(Duration.ofSeconds(60)) * .setRetryDelayMultiplier(1.3) * .setRpcTimeoutMultiplier(1.5) * .setTotalTimeoutDuration(Duration.ofSeconds(300)) * .build()); * PolicyBindingsStubSettings policyBindingsSettings = policyBindingsSettingsBuilder.build(); * }</pre> * * Please refer to the [Client Side Retry * Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for * additional support in setting retries. * * <p>To configure the RetrySettings of a Long Running Operation method, create an * OperationTimedPollAlgorithm object and update the RPC's polling algorithm. For example, to * configure the RetrySettings for createPolicyBinding: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * PolicyBindingsStubSettings.Builder policyBindingsSettingsBuilder = * PolicyBindingsStubSettings.newBuilder(); * TimedRetryAlgorithm timedRetryAlgorithm = * OperationalTimedPollAlgorithm.create( * RetrySettings.newBuilder() * .setInitialRetryDelayDuration(Duration.ofMillis(500)) * .setRetryDelayMultiplier(1.5) * .setMaxRetryDelayDuration(Duration.ofMillis(5000)) * .setTotalTimeoutDuration(Duration.ofHours(24)) * .build()); * policyBindingsSettingsBuilder * .createClusterOperationSettings() * .setPollingAlgorithm(timedRetryAlgorithm) * .build(); * }</pre> */ @BetaApi @Generated("by gapic-generator-java") public class PolicyBindingsStubSettings extends StubSettings<PolicyBindingsStubSettings> { /** The default scopes of the service. */ private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES = ImmutableList.<String>builder().add("https://www.googleapis.com/auth/cloud-platform").build(); private final UnaryCallSettings<CreatePolicyBindingRequest, Operation> createPolicyBindingSettings; private final OperationCallSettings<CreatePolicyBindingRequest, PolicyBinding, OperationMetadata> createPolicyBindingOperationSettings; private final UnaryCallSettings<GetPolicyBindingRequest, PolicyBinding> getPolicyBindingSettings; private final UnaryCallSettings<UpdatePolicyBindingRequest, Operation> updatePolicyBindingSettings; private final OperationCallSettings<UpdatePolicyBindingRequest, PolicyBinding, OperationMetadata> updatePolicyBindingOperationSettings; private final UnaryCallSettings<DeletePolicyBindingRequest, Operation> deletePolicyBindingSettings; private final OperationCallSettings<DeletePolicyBindingRequest, Empty, OperationMetadata> deletePolicyBindingOperationSettings; private final PagedCallSettings< ListPolicyBindingsRequest, ListPolicyBindingsResponse, 
ListPolicyBindingsPagedResponse> listPolicyBindingsSettings; private final PagedCallSettings< SearchTargetPolicyBindingsRequest, SearchTargetPolicyBindingsResponse, SearchTargetPolicyBindingsPagedResponse> searchTargetPolicyBindingsSettings; private static final PagedListDescriptor< ListPolicyBindingsRequest, ListPolicyBindingsResponse, PolicyBinding> LIST_POLICY_BINDINGS_PAGE_STR_DESC = new PagedListDescriptor< ListPolicyBindingsRequest, ListPolicyBindingsResponse, PolicyBinding>() { @Override public String emptyToken() { return ""; } @Override public ListPolicyBindingsRequest injectToken( ListPolicyBindingsRequest payload, String token) { return ListPolicyBindingsRequest.newBuilder(payload).setPageToken(token).build(); } @Override public ListPolicyBindingsRequest injectPageSize( ListPolicyBindingsRequest payload, int pageSize) { return ListPolicyBindingsRequest.newBuilder(payload).setPageSize(pageSize).build(); } @Override public Integer extractPageSize(ListPolicyBindingsRequest payload) { return payload.getPageSize(); } @Override public String extractNextToken(ListPolicyBindingsResponse payload) { return payload.getNextPageToken(); } @Override public Iterable<PolicyBinding> extractResources(ListPolicyBindingsResponse payload) { return payload.getPolicyBindingsList(); } }; private static final PagedListDescriptor< SearchTargetPolicyBindingsRequest, SearchTargetPolicyBindingsResponse, PolicyBinding> SEARCH_TARGET_POLICY_BINDINGS_PAGE_STR_DESC = new PagedListDescriptor< SearchTargetPolicyBindingsRequest, SearchTargetPolicyBindingsResponse, PolicyBinding>() { @Override public String emptyToken() { return ""; } @Override public SearchTargetPolicyBindingsRequest injectToken( SearchTargetPolicyBindingsRequest payload, String token) { return SearchTargetPolicyBindingsRequest.newBuilder(payload) .setPageToken(token) .build(); } @Override public SearchTargetPolicyBindingsRequest injectPageSize( SearchTargetPolicyBindingsRequest payload, int pageSize) { return 
SearchTargetPolicyBindingsRequest.newBuilder(payload) .setPageSize(pageSize) .build(); } @Override public Integer extractPageSize(SearchTargetPolicyBindingsRequest payload) { return payload.getPageSize(); } @Override public String extractNextToken(SearchTargetPolicyBindingsResponse payload) { return payload.getNextPageToken(); } @Override public Iterable<PolicyBinding> extractResources( SearchTargetPolicyBindingsResponse payload) { return payload.getPolicyBindingsList(); } }; private static final PagedListResponseFactory< ListPolicyBindingsRequest, ListPolicyBindingsResponse, ListPolicyBindingsPagedResponse> LIST_POLICY_BINDINGS_PAGE_STR_FACT = new PagedListResponseFactory< ListPolicyBindingsRequest, ListPolicyBindingsResponse, ListPolicyBindingsPagedResponse>() { @Override public ApiFuture<ListPolicyBindingsPagedResponse> getFuturePagedResponse( UnaryCallable<ListPolicyBindingsRequest, ListPolicyBindingsResponse> callable, ListPolicyBindingsRequest request, ApiCallContext context, ApiFuture<ListPolicyBindingsResponse> futureResponse) { PageContext<ListPolicyBindingsRequest, ListPolicyBindingsResponse, PolicyBinding> pageContext = PageContext.create( callable, LIST_POLICY_BINDINGS_PAGE_STR_DESC, request, context); return ListPolicyBindingsPagedResponse.createAsync(pageContext, futureResponse); } }; private static final PagedListResponseFactory< SearchTargetPolicyBindingsRequest, SearchTargetPolicyBindingsResponse, SearchTargetPolicyBindingsPagedResponse> SEARCH_TARGET_POLICY_BINDINGS_PAGE_STR_FACT = new PagedListResponseFactory< SearchTargetPolicyBindingsRequest, SearchTargetPolicyBindingsResponse, SearchTargetPolicyBindingsPagedResponse>() { @Override public ApiFuture<SearchTargetPolicyBindingsPagedResponse> getFuturePagedResponse( UnaryCallable<SearchTargetPolicyBindingsRequest, SearchTargetPolicyBindingsResponse> callable, SearchTargetPolicyBindingsRequest request, ApiCallContext context, ApiFuture<SearchTargetPolicyBindingsResponse> futureResponse) { 
PageContext< SearchTargetPolicyBindingsRequest, SearchTargetPolicyBindingsResponse, PolicyBinding> pageContext = PageContext.create( callable, SEARCH_TARGET_POLICY_BINDINGS_PAGE_STR_DESC, request, context); return SearchTargetPolicyBindingsPagedResponse.createAsync( pageContext, futureResponse); } }; /** Returns the object with the settings used for calls to createPolicyBinding. */ public UnaryCallSettings<CreatePolicyBindingRequest, Operation> createPolicyBindingSettings() { return createPolicyBindingSettings; } /** Returns the object with the settings used for calls to createPolicyBinding. */ public OperationCallSettings<CreatePolicyBindingRequest, PolicyBinding, OperationMetadata> createPolicyBindingOperationSettings() { return createPolicyBindingOperationSettings; } /** Returns the object with the settings used for calls to getPolicyBinding. */ public UnaryCallSettings<GetPolicyBindingRequest, PolicyBinding> getPolicyBindingSettings() { return getPolicyBindingSettings; } /** Returns the object with the settings used for calls to updatePolicyBinding. */ public UnaryCallSettings<UpdatePolicyBindingRequest, Operation> updatePolicyBindingSettings() { return updatePolicyBindingSettings; } /** Returns the object with the settings used for calls to updatePolicyBinding. */ public OperationCallSettings<UpdatePolicyBindingRequest, PolicyBinding, OperationMetadata> updatePolicyBindingOperationSettings() { return updatePolicyBindingOperationSettings; } /** Returns the object with the settings used for calls to deletePolicyBinding. */ public UnaryCallSettings<DeletePolicyBindingRequest, Operation> deletePolicyBindingSettings() { return deletePolicyBindingSettings; } /** Returns the object with the settings used for calls to deletePolicyBinding. 
*/ public OperationCallSettings<DeletePolicyBindingRequest, Empty, OperationMetadata> deletePolicyBindingOperationSettings() { return deletePolicyBindingOperationSettings; } /** Returns the object with the settings used for calls to listPolicyBindings. */ public PagedCallSettings< ListPolicyBindingsRequest, ListPolicyBindingsResponse, ListPolicyBindingsPagedResponse> listPolicyBindingsSettings() { return listPolicyBindingsSettings; } /** Returns the object with the settings used for calls to searchTargetPolicyBindings. */ public PagedCallSettings< SearchTargetPolicyBindingsRequest, SearchTargetPolicyBindingsResponse, SearchTargetPolicyBindingsPagedResponse> searchTargetPolicyBindingsSettings() { return searchTargetPolicyBindingsSettings; } public PolicyBindingsStub createStub() throws IOException { if (getTransportChannelProvider() .getTransportName() .equals(GrpcTransportChannel.getGrpcTransportName())) { return GrpcPolicyBindingsStub.create(this); } if (getTransportChannelProvider() .getTransportName() .equals(HttpJsonTransportChannel.getHttpJsonTransportName())) { return HttpJsonPolicyBindingsStub.create(this); } throw new UnsupportedOperationException( String.format( "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns the default service name. */ @Override public String getServiceName() { return "iam"; } /** Returns a builder for the default ExecutorProvider for this service. */ public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() { return InstantiatingExecutorProvider.newBuilder(); } /** Returns the default service endpoint. */ @ObsoleteApi("Use getEndpoint() instead") public static String getDefaultEndpoint() { return "iam.googleapis.com:443"; } /** Returns the default mTLS service endpoint. */ public static String getDefaultMtlsEndpoint() { return "iam.mtls.googleapis.com:443"; } /** Returns the default service scopes. 
*/ public static List<String> getDefaultServiceScopes() { return DEFAULT_SERVICE_SCOPES; } /** Returns a builder for the default credentials for this service. */ public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() { return GoogleCredentialsProvider.newBuilder() .setScopesToApply(DEFAULT_SERVICE_SCOPES) .setUseJwtAccessWithScope(true); } /** Returns a builder for the default gRPC ChannelProvider for this service. */ public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() { return InstantiatingGrpcChannelProvider.newBuilder() .setMaxInboundMessageSize(Integer.MAX_VALUE); } /** Returns a builder for the default REST ChannelProvider for this service. */ @BetaApi public static InstantiatingHttpJsonChannelProvider.Builder defaultHttpJsonTransportProviderBuilder() { return InstantiatingHttpJsonChannelProvider.newBuilder(); } public static TransportChannelProvider defaultTransportChannelProvider() { return defaultGrpcTransportProviderBuilder().build(); } public static ApiClientHeaderProvider.Builder defaultGrpcApiClientHeaderProviderBuilder() { return ApiClientHeaderProvider.newBuilder() .setGeneratedLibToken( "gapic", GaxProperties.getLibraryVersion(PolicyBindingsStubSettings.class)) .setTransportToken( GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion()); } public static ApiClientHeaderProvider.Builder defaultHttpJsonApiClientHeaderProviderBuilder() { return ApiClientHeaderProvider.newBuilder() .setGeneratedLibToken( "gapic", GaxProperties.getLibraryVersion(PolicyBindingsStubSettings.class)) .setTransportToken( GaxHttpJsonProperties.getHttpJsonTokenName(), GaxHttpJsonProperties.getHttpJsonVersion()); } public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() { return PolicyBindingsStubSettings.defaultGrpcApiClientHeaderProviderBuilder(); } /** Returns a new gRPC builder for this class. 
*/ public static Builder newBuilder() { return Builder.createDefault(); } /** Returns a new REST builder for this class. */ public static Builder newHttpJsonBuilder() { return Builder.createHttpJsonDefault(); } /** Returns a new builder for this class. */ public static Builder newBuilder(ClientContext clientContext) { return new Builder(clientContext); } /** Returns a builder containing all the values of this settings class. */ public Builder toBuilder() { return new Builder(this); } protected PolicyBindingsStubSettings(Builder settingsBuilder) throws IOException { super(settingsBuilder); createPolicyBindingSettings = settingsBuilder.createPolicyBindingSettings().build(); createPolicyBindingOperationSettings = settingsBuilder.createPolicyBindingOperationSettings().build(); getPolicyBindingSettings = settingsBuilder.getPolicyBindingSettings().build(); updatePolicyBindingSettings = settingsBuilder.updatePolicyBindingSettings().build(); updatePolicyBindingOperationSettings = settingsBuilder.updatePolicyBindingOperationSettings().build(); deletePolicyBindingSettings = settingsBuilder.deletePolicyBindingSettings().build(); deletePolicyBindingOperationSettings = settingsBuilder.deletePolicyBindingOperationSettings().build(); listPolicyBindingsSettings = settingsBuilder.listPolicyBindingsSettings().build(); searchTargetPolicyBindingsSettings = settingsBuilder.searchTargetPolicyBindingsSettings().build(); } /** Builder for PolicyBindingsStubSettings. 
*/ public static class Builder extends StubSettings.Builder<PolicyBindingsStubSettings, Builder> { private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders; private final UnaryCallSettings.Builder<CreatePolicyBindingRequest, Operation> createPolicyBindingSettings; private final OperationCallSettings.Builder< CreatePolicyBindingRequest, PolicyBinding, OperationMetadata> createPolicyBindingOperationSettings; private final UnaryCallSettings.Builder<GetPolicyBindingRequest, PolicyBinding> getPolicyBindingSettings; private final UnaryCallSettings.Builder<UpdatePolicyBindingRequest, Operation> updatePolicyBindingSettings; private final OperationCallSettings.Builder< UpdatePolicyBindingRequest, PolicyBinding, OperationMetadata> updatePolicyBindingOperationSettings; private final UnaryCallSettings.Builder<DeletePolicyBindingRequest, Operation> deletePolicyBindingSettings; private final OperationCallSettings.Builder< DeletePolicyBindingRequest, Empty, OperationMetadata> deletePolicyBindingOperationSettings; private final PagedCallSettings.Builder< ListPolicyBindingsRequest, ListPolicyBindingsResponse, ListPolicyBindingsPagedResponse> listPolicyBindingsSettings; private final PagedCallSettings.Builder< SearchTargetPolicyBindingsRequest, SearchTargetPolicyBindingsResponse, SearchTargetPolicyBindingsPagedResponse> searchTargetPolicyBindingsSettings; private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>> RETRYABLE_CODE_DEFINITIONS; static { ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions = ImmutableMap.builder(); definitions.put( "no_retry_1_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList())); definitions.put( "retry_policy_0_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList(StatusCode.Code.UNAVAILABLE))); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS; static { ImmutableMap.Builder<String, 
RetrySettings> definitions = ImmutableMap.builder(); RetrySettings settings = null; settings = RetrySettings.newBuilder() .setInitialRpcTimeoutDuration(Duration.ofMillis(30000L)) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ofMillis(30000L)) .setTotalTimeoutDuration(Duration.ofMillis(30000L)) .build(); definitions.put("no_retry_1_params", settings); settings = RetrySettings.newBuilder() .setInitialRetryDelayDuration(Duration.ofMillis(1000L)) .setRetryDelayMultiplier(1.3) .setMaxRetryDelayDuration(Duration.ofMillis(10000L)) .setInitialRpcTimeoutDuration(Duration.ofMillis(60000L)) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ofMillis(60000L)) .setTotalTimeoutDuration(Duration.ofMillis(60000L)) .build(); definitions.put("retry_policy_0_params", settings); RETRY_PARAM_DEFINITIONS = definitions.build(); } protected Builder() { this(((ClientContext) null)); } protected Builder(ClientContext clientContext) { super(clientContext); createPolicyBindingSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); createPolicyBindingOperationSettings = OperationCallSettings.newBuilder(); getPolicyBindingSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); updatePolicyBindingSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); updatePolicyBindingOperationSettings = OperationCallSettings.newBuilder(); deletePolicyBindingSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); deletePolicyBindingOperationSettings = OperationCallSettings.newBuilder(); listPolicyBindingsSettings = PagedCallSettings.newBuilder(LIST_POLICY_BINDINGS_PAGE_STR_FACT); searchTargetPolicyBindingsSettings = PagedCallSettings.newBuilder(SEARCH_TARGET_POLICY_BINDINGS_PAGE_STR_FACT); unaryMethodSettingsBuilders = ImmutableList.<UnaryCallSettings.Builder<?, ?>>of( createPolicyBindingSettings, getPolicyBindingSettings, updatePolicyBindingSettings, deletePolicyBindingSettings, listPolicyBindingsSettings, searchTargetPolicyBindingsSettings); 
initDefaults(this); } protected Builder(PolicyBindingsStubSettings settings) { super(settings); createPolicyBindingSettings = settings.createPolicyBindingSettings.toBuilder(); createPolicyBindingOperationSettings = settings.createPolicyBindingOperationSettings.toBuilder(); getPolicyBindingSettings = settings.getPolicyBindingSettings.toBuilder(); updatePolicyBindingSettings = settings.updatePolicyBindingSettings.toBuilder(); updatePolicyBindingOperationSettings = settings.updatePolicyBindingOperationSettings.toBuilder(); deletePolicyBindingSettings = settings.deletePolicyBindingSettings.toBuilder(); deletePolicyBindingOperationSettings = settings.deletePolicyBindingOperationSettings.toBuilder(); listPolicyBindingsSettings = settings.listPolicyBindingsSettings.toBuilder(); searchTargetPolicyBindingsSettings = settings.searchTargetPolicyBindingsSettings.toBuilder(); unaryMethodSettingsBuilders = ImmutableList.<UnaryCallSettings.Builder<?, ?>>of( createPolicyBindingSettings, getPolicyBindingSettings, updatePolicyBindingSettings, deletePolicyBindingSettings, listPolicyBindingsSettings, searchTargetPolicyBindingsSettings); } private static Builder createDefault() { Builder builder = new Builder(((ClientContext) null)); builder.setTransportChannelProvider(defaultTransportChannelProvider()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); builder.setMtlsEndpoint(getDefaultMtlsEndpoint()); builder.setSwitchToMtlsEndpointAllowed(true); return initDefaults(builder); } private static Builder createHttpJsonDefault() { Builder builder = new Builder(((ClientContext) null)); builder.setTransportChannelProvider(defaultHttpJsonTransportProviderBuilder().build()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultHttpJsonApiClientHeaderProviderBuilder().build()); 
builder.setMtlsEndpoint(getDefaultMtlsEndpoint()); builder.setSwitchToMtlsEndpointAllowed(true); return initDefaults(builder); } private static Builder initDefaults(Builder builder) { builder .createPolicyBindingSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")); builder .getPolicyBindingSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .updatePolicyBindingSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")); builder .deletePolicyBindingSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")); builder .listPolicyBindingsSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .searchTargetPolicyBindingsSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .createPolicyBindingOperationSettings() .setInitialCallSettings( UnaryCallSettings .<CreatePolicyBindingRequest, OperationSnapshot>newUnaryCallSettingsBuilder() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")) .build()) .setResponseTransformer( ProtoOperationTransformers.ResponseTransformer.create(PolicyBinding.class)) .setMetadataTransformer( ProtoOperationTransformers.MetadataTransformer.create(OperationMetadata.class)) .setPollingAlgorithm( OperationTimedPollAlgorithm.create( RetrySettings.newBuilder() .setInitialRetryDelayDuration(Duration.ofMillis(5000L)) .setRetryDelayMultiplier(1.5) 
.setMaxRetryDelayDuration(Duration.ofMillis(45000L)) .setInitialRpcTimeoutDuration(Duration.ZERO) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ZERO) .setTotalTimeoutDuration(Duration.ofMillis(300000L)) .build())); builder .updatePolicyBindingOperationSettings() .setInitialCallSettings( UnaryCallSettings .<UpdatePolicyBindingRequest, OperationSnapshot>newUnaryCallSettingsBuilder() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")) .build()) .setResponseTransformer( ProtoOperationTransformers.ResponseTransformer.create(PolicyBinding.class)) .setMetadataTransformer( ProtoOperationTransformers.MetadataTransformer.create(OperationMetadata.class)) .setPollingAlgorithm( OperationTimedPollAlgorithm.create( RetrySettings.newBuilder() .setInitialRetryDelayDuration(Duration.ofMillis(5000L)) .setRetryDelayMultiplier(1.5) .setMaxRetryDelayDuration(Duration.ofMillis(45000L)) .setInitialRpcTimeoutDuration(Duration.ZERO) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ZERO) .setTotalTimeoutDuration(Duration.ofMillis(300000L)) .build())); builder .deletePolicyBindingOperationSettings() .setInitialCallSettings( UnaryCallSettings .<DeletePolicyBindingRequest, OperationSnapshot>newUnaryCallSettingsBuilder() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")) .build()) .setResponseTransformer( ProtoOperationTransformers.ResponseTransformer.create(Empty.class)) .setMetadataTransformer( ProtoOperationTransformers.MetadataTransformer.create(OperationMetadata.class)) .setPollingAlgorithm( OperationTimedPollAlgorithm.create( RetrySettings.newBuilder() .setInitialRetryDelayDuration(Duration.ofMillis(5000L)) .setRetryDelayMultiplier(1.5) .setMaxRetryDelayDuration(Duration.ofMillis(45000L)) .setInitialRpcTimeoutDuration(Duration.ZERO) .setRpcTimeoutMultiplier(1.0) 
.setMaxRpcTimeoutDuration(Duration.ZERO) .setTotalTimeoutDuration(Duration.ofMillis(300000L)) .build())); return builder; } /** * Applies the given settings updater function to all of the unary API methods in this service. * * <p>Note: This method does not support applying settings to streaming methods. */ public Builder applyToAllUnaryMethods( ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) { super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater); return this; } public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() { return unaryMethodSettingsBuilders; } /** Returns the builder for the settings used for calls to createPolicyBinding. */ public UnaryCallSettings.Builder<CreatePolicyBindingRequest, Operation> createPolicyBindingSettings() { return createPolicyBindingSettings; } /** Returns the builder for the settings used for calls to createPolicyBinding. */ public OperationCallSettings.Builder< CreatePolicyBindingRequest, PolicyBinding, OperationMetadata> createPolicyBindingOperationSettings() { return createPolicyBindingOperationSettings; } /** Returns the builder for the settings used for calls to getPolicyBinding. */ public UnaryCallSettings.Builder<GetPolicyBindingRequest, PolicyBinding> getPolicyBindingSettings() { return getPolicyBindingSettings; } /** Returns the builder for the settings used for calls to updatePolicyBinding. */ public UnaryCallSettings.Builder<UpdatePolicyBindingRequest, Operation> updatePolicyBindingSettings() { return updatePolicyBindingSettings; } /** Returns the builder for the settings used for calls to updatePolicyBinding. */ public OperationCallSettings.Builder< UpdatePolicyBindingRequest, PolicyBinding, OperationMetadata> updatePolicyBindingOperationSettings() { return updatePolicyBindingOperationSettings; } /** Returns the builder for the settings used for calls to deletePolicyBinding. 
*/ public UnaryCallSettings.Builder<DeletePolicyBindingRequest, Operation> deletePolicyBindingSettings() { return deletePolicyBindingSettings; } /** Returns the builder for the settings used for calls to deletePolicyBinding. */ public OperationCallSettings.Builder<DeletePolicyBindingRequest, Empty, OperationMetadata> deletePolicyBindingOperationSettings() { return deletePolicyBindingOperationSettings; } /** Returns the builder for the settings used for calls to listPolicyBindings. */ public PagedCallSettings.Builder< ListPolicyBindingsRequest, ListPolicyBindingsResponse, ListPolicyBindingsPagedResponse> listPolicyBindingsSettings() { return listPolicyBindingsSettings; } /** Returns the builder for the settings used for calls to searchTargetPolicyBindings. */ public PagedCallSettings.Builder< SearchTargetPolicyBindingsRequest, SearchTargetPolicyBindingsResponse, SearchTargetPolicyBindingsPagedResponse> searchTargetPolicyBindingsSettings() { return searchTargetPolicyBindingsSettings; } @Override public PolicyBindingsStubSettings build() throws IOException { return new PolicyBindingsStubSettings(this); } } }
googleapis/google-cloud-java
37,300
java-dialogflow/grpc-google-cloud-dialogflow-v2beta1/src/main/java/com/google/cloud/dialogflow/v2beta1/VersionsGrpc.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.dialogflow.v2beta1; import static io.grpc.MethodDescriptor.generateFullMethodName; /** * * * <pre> * Service for managing [Versions][google.cloud.dialogflow.v2beta1.Version]. * </pre> */ @javax.annotation.Generated( value = "by gRPC proto compiler", comments = "Source: google/cloud/dialogflow/v2beta1/version.proto") @io.grpc.stub.annotations.GrpcGenerated public final class VersionsGrpc { private VersionsGrpc() {} public static final java.lang.String SERVICE_NAME = "google.cloud.dialogflow.v2beta1.Versions"; // Static method descriptors that strictly reflect the proto. 
private static volatile io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.ListVersionsRequest, com.google.cloud.dialogflow.v2beta1.ListVersionsResponse> getListVersionsMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "ListVersions", requestType = com.google.cloud.dialogflow.v2beta1.ListVersionsRequest.class, responseType = com.google.cloud.dialogflow.v2beta1.ListVersionsResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.ListVersionsRequest, com.google.cloud.dialogflow.v2beta1.ListVersionsResponse> getListVersionsMethod() { io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.ListVersionsRequest, com.google.cloud.dialogflow.v2beta1.ListVersionsResponse> getListVersionsMethod; if ((getListVersionsMethod = VersionsGrpc.getListVersionsMethod) == null) { synchronized (VersionsGrpc.class) { if ((getListVersionsMethod = VersionsGrpc.getListVersionsMethod) == null) { VersionsGrpc.getListVersionsMethod = getListVersionsMethod = io.grpc.MethodDescriptor .<com.google.cloud.dialogflow.v2beta1.ListVersionsRequest, com.google.cloud.dialogflow.v2beta1.ListVersionsResponse> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListVersions")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.dialogflow.v2beta1.ListVersionsRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.dialogflow.v2beta1.ListVersionsResponse .getDefaultInstance())) .setSchemaDescriptor(new VersionsMethodDescriptorSupplier("ListVersions")) .build(); } } } return getListVersionsMethod; } private static volatile io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.GetVersionRequest, com.google.cloud.dialogflow.v2beta1.Version> getGetVersionMethod; 
@io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "GetVersion", requestType = com.google.cloud.dialogflow.v2beta1.GetVersionRequest.class, responseType = com.google.cloud.dialogflow.v2beta1.Version.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.GetVersionRequest, com.google.cloud.dialogflow.v2beta1.Version> getGetVersionMethod() { io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.GetVersionRequest, com.google.cloud.dialogflow.v2beta1.Version> getGetVersionMethod; if ((getGetVersionMethod = VersionsGrpc.getGetVersionMethod) == null) { synchronized (VersionsGrpc.class) { if ((getGetVersionMethod = VersionsGrpc.getGetVersionMethod) == null) { VersionsGrpc.getGetVersionMethod = getGetVersionMethod = io.grpc.MethodDescriptor .<com.google.cloud.dialogflow.v2beta1.GetVersionRequest, com.google.cloud.dialogflow.v2beta1.Version> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetVersion")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.dialogflow.v2beta1.GetVersionRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.dialogflow.v2beta1.Version.getDefaultInstance())) .setSchemaDescriptor(new VersionsMethodDescriptorSupplier("GetVersion")) .build(); } } } return getGetVersionMethod; } private static volatile io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.CreateVersionRequest, com.google.cloud.dialogflow.v2beta1.Version> getCreateVersionMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "CreateVersion", requestType = com.google.cloud.dialogflow.v2beta1.CreateVersionRequest.class, responseType = com.google.cloud.dialogflow.v2beta1.Version.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public 
static io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.CreateVersionRequest, com.google.cloud.dialogflow.v2beta1.Version> getCreateVersionMethod() { io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.CreateVersionRequest, com.google.cloud.dialogflow.v2beta1.Version> getCreateVersionMethod; if ((getCreateVersionMethod = VersionsGrpc.getCreateVersionMethod) == null) { synchronized (VersionsGrpc.class) { if ((getCreateVersionMethod = VersionsGrpc.getCreateVersionMethod) == null) { VersionsGrpc.getCreateVersionMethod = getCreateVersionMethod = io.grpc.MethodDescriptor .<com.google.cloud.dialogflow.v2beta1.CreateVersionRequest, com.google.cloud.dialogflow.v2beta1.Version> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "CreateVersion")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.dialogflow.v2beta1.CreateVersionRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.dialogflow.v2beta1.Version.getDefaultInstance())) .setSchemaDescriptor(new VersionsMethodDescriptorSupplier("CreateVersion")) .build(); } } } return getCreateVersionMethod; } private static volatile io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.UpdateVersionRequest, com.google.cloud.dialogflow.v2beta1.Version> getUpdateVersionMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "UpdateVersion", requestType = com.google.cloud.dialogflow.v2beta1.UpdateVersionRequest.class, responseType = com.google.cloud.dialogflow.v2beta1.Version.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.UpdateVersionRequest, com.google.cloud.dialogflow.v2beta1.Version> getUpdateVersionMethod() { io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.UpdateVersionRequest, 
com.google.cloud.dialogflow.v2beta1.Version> getUpdateVersionMethod; if ((getUpdateVersionMethod = VersionsGrpc.getUpdateVersionMethod) == null) { synchronized (VersionsGrpc.class) { if ((getUpdateVersionMethod = VersionsGrpc.getUpdateVersionMethod) == null) { VersionsGrpc.getUpdateVersionMethod = getUpdateVersionMethod = io.grpc.MethodDescriptor .<com.google.cloud.dialogflow.v2beta1.UpdateVersionRequest, com.google.cloud.dialogflow.v2beta1.Version> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "UpdateVersion")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.dialogflow.v2beta1.UpdateVersionRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.dialogflow.v2beta1.Version.getDefaultInstance())) .setSchemaDescriptor(new VersionsMethodDescriptorSupplier("UpdateVersion")) .build(); } } } return getUpdateVersionMethod; } private static volatile io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.DeleteVersionRequest, com.google.protobuf.Empty> getDeleteVersionMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "DeleteVersion", requestType = com.google.cloud.dialogflow.v2beta1.DeleteVersionRequest.class, responseType = com.google.protobuf.Empty.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.DeleteVersionRequest, com.google.protobuf.Empty> getDeleteVersionMethod() { io.grpc.MethodDescriptor< com.google.cloud.dialogflow.v2beta1.DeleteVersionRequest, com.google.protobuf.Empty> getDeleteVersionMethod; if ((getDeleteVersionMethod = VersionsGrpc.getDeleteVersionMethod) == null) { synchronized (VersionsGrpc.class) { if ((getDeleteVersionMethod = VersionsGrpc.getDeleteVersionMethod) == null) { VersionsGrpc.getDeleteVersionMethod = getDeleteVersionMethod = 
io.grpc.MethodDescriptor .<com.google.cloud.dialogflow.v2beta1.DeleteVersionRequest, com.google.protobuf.Empty> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "DeleteVersion")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.dialogflow.v2beta1.DeleteVersionRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.protobuf.Empty.getDefaultInstance())) .setSchemaDescriptor(new VersionsMethodDescriptorSupplier("DeleteVersion")) .build(); } } } return getDeleteVersionMethod; } /** Creates a new async stub that supports all call types for the service */ public static VersionsStub newStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<VersionsStub> factory = new io.grpc.stub.AbstractStub.StubFactory<VersionsStub>() { @java.lang.Override public VersionsStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new VersionsStub(channel, callOptions); } }; return VersionsStub.newStub(factory, channel); } /** Creates a new blocking-style stub that supports all types of calls on the service */ public static VersionsBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<VersionsBlockingV2Stub> factory = new io.grpc.stub.AbstractStub.StubFactory<VersionsBlockingV2Stub>() { @java.lang.Override public VersionsBlockingV2Stub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new VersionsBlockingV2Stub(channel, callOptions); } }; return VersionsBlockingV2Stub.newStub(factory, channel); } /** * Creates a new blocking-style stub that supports unary and streaming output calls on the service */ public static VersionsBlockingStub newBlockingStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<VersionsBlockingStub> factory = new 
io.grpc.stub.AbstractStub.StubFactory<VersionsBlockingStub>() { @java.lang.Override public VersionsBlockingStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new VersionsBlockingStub(channel, callOptions); } }; return VersionsBlockingStub.newStub(factory, channel); } /** Creates a new ListenableFuture-style stub that supports unary calls on the service */ public static VersionsFutureStub newFutureStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<VersionsFutureStub> factory = new io.grpc.stub.AbstractStub.StubFactory<VersionsFutureStub>() { @java.lang.Override public VersionsFutureStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new VersionsFutureStub(channel, callOptions); } }; return VersionsFutureStub.newStub(factory, channel); } /** * * * <pre> * Service for managing [Versions][google.cloud.dialogflow.v2beta1.Version]. * </pre> */ public interface AsyncService { /** * * * <pre> * Returns the list of all versions of the specified agent. * </pre> */ default void listVersions( com.google.cloud.dialogflow.v2beta1.ListVersionsRequest request, io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.v2beta1.ListVersionsResponse> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getListVersionsMethod(), responseObserver); } /** * * * <pre> * Retrieves the specified agent version. * </pre> */ default void getVersion( com.google.cloud.dialogflow.v2beta1.GetVersionRequest request, io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.v2beta1.Version> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGetVersionMethod(), responseObserver); } /** * * * <pre> * Creates an agent version. * The new version points to the agent instance in the "default" environment. 
* </pre> */ default void createVersion( com.google.cloud.dialogflow.v2beta1.CreateVersionRequest request, io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.v2beta1.Version> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getCreateVersionMethod(), responseObserver); } /** * * * <pre> * Updates the specified agent version. * Note that this method does not allow you to update the state of the agent * the given version points to. It allows you to update only mutable * properties of the version resource. * </pre> */ default void updateVersion( com.google.cloud.dialogflow.v2beta1.UpdateVersionRequest request, io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.v2beta1.Version> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getUpdateVersionMethod(), responseObserver); } /** * * * <pre> * Delete the specified agent version. * </pre> */ default void deleteVersion( com.google.cloud.dialogflow.v2beta1.DeleteVersionRequest request, io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getDeleteVersionMethod(), responseObserver); } } /** * Base class for the server implementation of the service Versions. * * <pre> * Service for managing [Versions][google.cloud.dialogflow.v2beta1.Version]. * </pre> */ public abstract static class VersionsImplBase implements io.grpc.BindableService, AsyncService { @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() { return VersionsGrpc.bindService(this); } } /** * A stub to allow clients to do asynchronous rpc calls to service Versions. * * <pre> * Service for managing [Versions][google.cloud.dialogflow.v2beta1.Version]. 
* </pre> */ public static final class VersionsStub extends io.grpc.stub.AbstractAsyncStub<VersionsStub> { private VersionsStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected VersionsStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new VersionsStub(channel, callOptions); } /** * * * <pre> * Returns the list of all versions of the specified agent. * </pre> */ public void listVersions( com.google.cloud.dialogflow.v2beta1.ListVersionsRequest request, io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.v2beta1.ListVersionsResponse> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getListVersionsMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Retrieves the specified agent version. * </pre> */ public void getVersion( com.google.cloud.dialogflow.v2beta1.GetVersionRequest request, io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.v2beta1.Version> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getGetVersionMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Creates an agent version. * The new version points to the agent instance in the "default" environment. * </pre> */ public void createVersion( com.google.cloud.dialogflow.v2beta1.CreateVersionRequest request, io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.v2beta1.Version> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getCreateVersionMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Updates the specified agent version. * Note that this method does not allow you to update the state of the agent * the given version points to. It allows you to update only mutable * properties of the version resource. 
* </pre> */ public void updateVersion( com.google.cloud.dialogflow.v2beta1.UpdateVersionRequest request, io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.v2beta1.Version> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getUpdateVersionMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Delete the specified agent version. * </pre> */ public void deleteVersion( com.google.cloud.dialogflow.v2beta1.DeleteVersionRequest request, io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getDeleteVersionMethod(), getCallOptions()), request, responseObserver); } } /** * A stub to allow clients to do synchronous rpc calls to service Versions. * * <pre> * Service for managing [Versions][google.cloud.dialogflow.v2beta1.Version]. * </pre> */ public static final class VersionsBlockingV2Stub extends io.grpc.stub.AbstractBlockingStub<VersionsBlockingV2Stub> { private VersionsBlockingV2Stub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected VersionsBlockingV2Stub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new VersionsBlockingV2Stub(channel, callOptions); } /** * * * <pre> * Returns the list of all versions of the specified agent. * </pre> */ public com.google.cloud.dialogflow.v2beta1.ListVersionsResponse listVersions( com.google.cloud.dialogflow.v2beta1.ListVersionsRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getListVersionsMethod(), getCallOptions(), request); } /** * * * <pre> * Retrieves the specified agent version. 
* </pre> */ public com.google.cloud.dialogflow.v2beta1.Version getVersion( com.google.cloud.dialogflow.v2beta1.GetVersionRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getGetVersionMethod(), getCallOptions(), request); } /** * * * <pre> * Creates an agent version. * The new version points to the agent instance in the "default" environment. * </pre> */ public com.google.cloud.dialogflow.v2beta1.Version createVersion( com.google.cloud.dialogflow.v2beta1.CreateVersionRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getCreateVersionMethod(), getCallOptions(), request); } /** * * * <pre> * Updates the specified agent version. * Note that this method does not allow you to update the state of the agent * the given version points to. It allows you to update only mutable * properties of the version resource. * </pre> */ public com.google.cloud.dialogflow.v2beta1.Version updateVersion( com.google.cloud.dialogflow.v2beta1.UpdateVersionRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getUpdateVersionMethod(), getCallOptions(), request); } /** * * * <pre> * Delete the specified agent version. * </pre> */ public com.google.protobuf.Empty deleteVersion( com.google.cloud.dialogflow.v2beta1.DeleteVersionRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getDeleteVersionMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do limited synchronous rpc calls to service Versions. * * <pre> * Service for managing [Versions][google.cloud.dialogflow.v2beta1.Version]. 
* </pre> */ public static final class VersionsBlockingStub extends io.grpc.stub.AbstractBlockingStub<VersionsBlockingStub> { private VersionsBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected VersionsBlockingStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new VersionsBlockingStub(channel, callOptions); } /** * * * <pre> * Returns the list of all versions of the specified agent. * </pre> */ public com.google.cloud.dialogflow.v2beta1.ListVersionsResponse listVersions( com.google.cloud.dialogflow.v2beta1.ListVersionsRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getListVersionsMethod(), getCallOptions(), request); } /** * * * <pre> * Retrieves the specified agent version. * </pre> */ public com.google.cloud.dialogflow.v2beta1.Version getVersion( com.google.cloud.dialogflow.v2beta1.GetVersionRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getGetVersionMethod(), getCallOptions(), request); } /** * * * <pre> * Creates an agent version. * The new version points to the agent instance in the "default" environment. * </pre> */ public com.google.cloud.dialogflow.v2beta1.Version createVersion( com.google.cloud.dialogflow.v2beta1.CreateVersionRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getCreateVersionMethod(), getCallOptions(), request); } /** * * * <pre> * Updates the specified agent version. * Note that this method does not allow you to update the state of the agent * the given version points to. It allows you to update only mutable * properties of the version resource. 
* </pre> */ public com.google.cloud.dialogflow.v2beta1.Version updateVersion( com.google.cloud.dialogflow.v2beta1.UpdateVersionRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getUpdateVersionMethod(), getCallOptions(), request); } /** * * * <pre> * Delete the specified agent version. * </pre> */ public com.google.protobuf.Empty deleteVersion( com.google.cloud.dialogflow.v2beta1.DeleteVersionRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getDeleteVersionMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do ListenableFuture-style rpc calls to service Versions. * * <pre> * Service for managing [Versions][google.cloud.dialogflow.v2beta1.Version]. * </pre> */ public static final class VersionsFutureStub extends io.grpc.stub.AbstractFutureStub<VersionsFutureStub> { private VersionsFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected VersionsFutureStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new VersionsFutureStub(channel, callOptions); } /** * * * <pre> * Returns the list of all versions of the specified agent. * </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.cloud.dialogflow.v2beta1.ListVersionsResponse> listVersions(com.google.cloud.dialogflow.v2beta1.ListVersionsRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getListVersionsMethod(), getCallOptions()), request); } /** * * * <pre> * Retrieves the specified agent version. * </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.cloud.dialogflow.v2beta1.Version> getVersion(com.google.cloud.dialogflow.v2beta1.GetVersionRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getGetVersionMethod(), getCallOptions()), request); } /** * * * <pre> * Creates an agent version. 
* The new version points to the agent instance in the "default" environment. * </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.cloud.dialogflow.v2beta1.Version> createVersion(com.google.cloud.dialogflow.v2beta1.CreateVersionRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getCreateVersionMethod(), getCallOptions()), request); } /** * * * <pre> * Updates the specified agent version. * Note that this method does not allow you to update the state of the agent * the given version points to. It allows you to update only mutable * properties of the version resource. * </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.cloud.dialogflow.v2beta1.Version> updateVersion(com.google.cloud.dialogflow.v2beta1.UpdateVersionRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getUpdateVersionMethod(), getCallOptions()), request); } /** * * * <pre> * Delete the specified agent version. 
* </pre> */ public com.google.common.util.concurrent.ListenableFuture<com.google.protobuf.Empty> deleteVersion(com.google.cloud.dialogflow.v2beta1.DeleteVersionRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getDeleteVersionMethod(), getCallOptions()), request); } } private static final int METHODID_LIST_VERSIONS = 0; private static final int METHODID_GET_VERSION = 1; private static final int METHODID_CREATE_VERSION = 2; private static final int METHODID_UPDATE_VERSION = 3; private static final int METHODID_DELETE_VERSION = 4; private static final class MethodHandlers<Req, Resp> implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>, io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> { private final AsyncService serviceImpl; private final int methodId; MethodHandlers(AsyncService serviceImpl, int methodId) { this.serviceImpl = serviceImpl; this.methodId = methodId; } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { case METHODID_LIST_VERSIONS: serviceImpl.listVersions( (com.google.cloud.dialogflow.v2beta1.ListVersionsRequest) request, (io.grpc.stub.StreamObserver< com.google.cloud.dialogflow.v2beta1.ListVersionsResponse>) responseObserver); break; case METHODID_GET_VERSION: serviceImpl.getVersion( (com.google.cloud.dialogflow.v2beta1.GetVersionRequest) request, (io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.v2beta1.Version>) responseObserver); break; case METHODID_CREATE_VERSION: serviceImpl.createVersion( (com.google.cloud.dialogflow.v2beta1.CreateVersionRequest) request, (io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.v2beta1.Version>) responseObserver); break; case METHODID_UPDATE_VERSION: serviceImpl.updateVersion( 
(com.google.cloud.dialogflow.v2beta1.UpdateVersionRequest) request, (io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.v2beta1.Version>) responseObserver); break; case METHODID_DELETE_VERSION: serviceImpl.deleteVersion( (com.google.cloud.dialogflow.v2beta1.DeleteVersionRequest) request, (io.grpc.stub.StreamObserver<com.google.protobuf.Empty>) responseObserver); break; default: throw new AssertionError(); } } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public io.grpc.stub.StreamObserver<Req> invoke( io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { default: throw new AssertionError(); } } } public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) { return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor()) .addMethod( getListVersionsMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.dialogflow.v2beta1.ListVersionsRequest, com.google.cloud.dialogflow.v2beta1.ListVersionsResponse>( service, METHODID_LIST_VERSIONS))) .addMethod( getGetVersionMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.dialogflow.v2beta1.GetVersionRequest, com.google.cloud.dialogflow.v2beta1.Version>(service, METHODID_GET_VERSION))) .addMethod( getCreateVersionMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.dialogflow.v2beta1.CreateVersionRequest, com.google.cloud.dialogflow.v2beta1.Version>(service, METHODID_CREATE_VERSION))) .addMethod( getUpdateVersionMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.dialogflow.v2beta1.UpdateVersionRequest, com.google.cloud.dialogflow.v2beta1.Version>(service, METHODID_UPDATE_VERSION))) .addMethod( getDeleteVersionMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.dialogflow.v2beta1.DeleteVersionRequest, com.google.protobuf.Empty>(service, METHODID_DELETE_VERSION))) .build(); } private 
abstract static class VersionsBaseDescriptorSupplier implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier { VersionsBaseDescriptorSupplier() {} @java.lang.Override public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() { return com.google.cloud.dialogflow.v2beta1.VersionProto.getDescriptor(); } @java.lang.Override public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() { return getFileDescriptor().findServiceByName("Versions"); } } private static final class VersionsFileDescriptorSupplier extends VersionsBaseDescriptorSupplier { VersionsFileDescriptorSupplier() {} } private static final class VersionsMethodDescriptorSupplier extends VersionsBaseDescriptorSupplier implements io.grpc.protobuf.ProtoMethodDescriptorSupplier { private final java.lang.String methodName; VersionsMethodDescriptorSupplier(java.lang.String methodName) { this.methodName = methodName; } @java.lang.Override public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() { return getServiceDescriptor().findMethodByName(methodName); } } private static volatile io.grpc.ServiceDescriptor serviceDescriptor; public static io.grpc.ServiceDescriptor getServiceDescriptor() { io.grpc.ServiceDescriptor result = serviceDescriptor; if (result == null) { synchronized (VersionsGrpc.class) { result = serviceDescriptor; if (result == null) { serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME) .setSchemaDescriptor(new VersionsFileDescriptorSupplier()) .addMethod(getListVersionsMethod()) .addMethod(getGetVersionMethod()) .addMethod(getCreateVersionMethod()) .addMethod(getUpdateVersionMethod()) .addMethod(getDeleteVersionMethod()) .build(); } } } return result; } }
apache/poi
37,368
poi-examples/src/main/java/org/apache/poi/examples/ss/ToCSV.java
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */
package org.apache.poi.examples.ss;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FilenameFilter;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.ArrayList;

import org.apache.logging.log4j.Logger;
import org.apache.poi.logging.PoiLogManager;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.CellType;
import org.apache.poi.ss.usermodel.DataFormatter;
import org.apache.poi.ss.usermodel.FormulaEvaluator;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.ss.usermodel.WorkbookFactory;

/**
 * Demonstrates <em>one</em> way to convert an Excel spreadsheet into a CSV
 * file. This class makes the following assumptions:
 * <ul>
 * <li>Where the Excel workbook contains more than one worksheet, a single
 *     CSV file will contain the data from all of the worksheets.</li>
 * <li>The data matrix contained in the CSV file will be square: the number of
 *     fields in each record will match the number of cells in the longest row
 *     found in the workbook, and short records are padded with empty fields
 *     (i.e. consecutive separators - ,,).</li>
 * <li>Empty fields represent missing cells.</li>
 * <li>A record consisting solely of empty fields represents an empty row in
 *     the Excel workbook.</li>
 * </ul>
 * <p>
 * The comma is assumed as the field separator unless the client specifies a
 * different one. If a field contains the separator, an end-of-line (EOL)
 * character or a speech mark ("), the field is escaped according to the chosen
 * formatting convention:
 * </p>
 * <ul>
 * <li>{@link #EXCEL_STYLE_ESCAPING}: each embedded speech mark is doubled and
 *     the whole field is surrounded with speech marks; fields containing the
 *     separator or an EOL character are likewise surrounded with speech marks.
 *     Thus <em>"Hello" he said</em> becomes <em>"""Hello"" he said"</em> and
 *     <em>1,400</em> becomes <em>"1,400"</em>.</li>
 * <li>{@link #UNIX_STYLE_ESCAPING}: each embedded separator or EOL character
 *     is preceded by a single backslash.</li>
 * </ul>
 * <p>
 * Note that talk of CSV 'standards' is slightly misleading as there is no such
 * thing; the code in this class may have to be modified to produce files that
 * suit a specific application or requirement.
 * </p>
 */
@SuppressWarnings({"java:S106","java:S4823","java:S1192"})
public class ToCSV {

    private static final Logger LOG = PoiLogManager.getLogger(ToCSV.class);

    private Workbook workbook;
    // One inner list per row; each String is a single (already formatted) cell value.
    private ArrayList<ArrayList<String>> csvData;
    // Width of the widest row seen so far; used to pad short records.
    private int maxRowWidth;
    private int formattingConvention;
    private DataFormatter formatter;
    private FormulaEvaluator evaluator;
    private String separator;

    private static final String CSV_FILE_EXTENSION = ".csv";
    private static final String DEFAULT_SEPARATOR = ",";

    /**
     * Identifies that the CSV file should obey Excel's formatting conventions
     * with regard to escaping certain embedded characters - the field
     * separator, speech mark and end of line (EOL) character.
     */
    public static final int EXCEL_STYLE_ESCAPING = 0;

    /**
     * Identifies that the CSV file should obey UNIX formatting conventions
     * with regard to escaping certain embedded characters - the field
     * separator and end of line (EOL) character.
     */
    public static final int UNIX_STYLE_ESCAPING = 1;

    /**
     * Convert the Excel workbook(s) found at {@code strSource} into CSV files
     * written to {@code strDestination}, using the comma as the field
     * separator and Excel's escaping conventions.
     *
     * @param strSource name of and path to either a folder containing Excel
     *        workbook(s) or an individual Excel workbook to convert.
     * @param strDestination name of and path to an existing folder that will
     *        receive the resulting CSV file(s).
     * @throws java.io.FileNotFoundException if any file cannot be located.
     * @throws java.io.IOException if the filesystem encounters any problems.
     * @throws java.lang.IllegalArgumentException if the source does not exist
     *         or the destination does not exist or is not a folder.
     */
    public void convertExcelToCSV(String strSource, String strDestination)
            throws FileNotFoundException, IOException, IllegalArgumentException {
        // Chain to the fully-parameterised overload with the defaults.
        this.convertExcelToCSV(strSource, strDestination,
                ToCSV.DEFAULT_SEPARATOR, ToCSV.EXCEL_STYLE_ESCAPING);
    }

    /**
     * Convert the Excel workbook(s) found at {@code strSource} into CSV files
     * written to {@code strDestination}, using the given field separator and
     * Excel's escaping conventions.
     *
     * @param strSource name of and path to either a folder containing Excel
     *        workbook(s) or an individual Excel workbook to convert.
     * @param strDestination name of and path to an existing folder that will
     *        receive the resulting CSV file(s).
     * @param separator the character or characters to use as the field
     *        separator.
     * @throws java.io.FileNotFoundException if any file cannot be located.
     * @throws java.io.IOException if the filesystem encounters any problems.
     * @throws java.lang.IllegalArgumentException if the source does not exist
     *         or the destination does not exist or is not a folder.
     */
    public void convertExcelToCSV(String strSource, String strDestination,
            String separator)
            throws FileNotFoundException, IOException, IllegalArgumentException {
        // Chain to the fully-parameterised overload with Excel escaping.
        this.convertExcelToCSV(strSource, strDestination,
                separator, ToCSV.EXCEL_STYLE_ESCAPING);
    }

    /**
     * Convert the Excel workbook(s) found at {@code strSource} into CSV files
     * written to {@code strDestination}. The CSV file name is derived from the
     * workbook name by replacing the .xls or .xlsx extension with .csv; note
     * that if the source folder contains both Test.xls and Test.xlsx the two
     * resulting CSV files will share a name and one will overwrite the other.
     *
     * @param strSource name of and path to either a folder containing Excel
     *        workbook(s) or an individual Excel workbook to convert.
     * @param strDestination name of and path to an existing folder that will
     *        receive the resulting CSV file(s).
     * @param separator the character or characters to use as the field
     *        separator.
     * @param formattingConvention either {@link #EXCEL_STYLE_ESCAPING} or
     *        {@link #UNIX_STYLE_ESCAPING}.
     * @throws java.io.FileNotFoundException if any file cannot be located.
     * @throws java.io.IOException if the filesystem encounters any problems.
     * @throws java.lang.IllegalArgumentException if the source does not exist,
     *         the destination does not exist or is not a folder, or the
     *         formatting convention is out of range.
     */
    public void convertExcelToCSV(String strSource, String strDestination,
            String separator, int formattingConvention)
            throws FileNotFoundException, IOException, IllegalArgumentException {

        // Check that the source file/folder exists.
        File source = new File(strSource);
        if (!source.exists()) {
            throw new IllegalArgumentException("The source for the Excel "
                    + "file(s) cannot be found at " + source);
        }

        // Ensure that the folder the user has chosen to save the CSV files
        // into firstly exists and secondly is a folder rather than, for
        // instance, a data file.
        File destination = new File(strDestination);
        if (!destination.exists()) {
            throw new IllegalArgumentException("The destination directory "
                    + destination + " for the "
                    + "converted CSV file(s) does not exist.");
        }
        if (!destination.isDirectory()) {
            throw new IllegalArgumentException("The destination " + destination
                    + " for the CSV "
                    + "file(s) is not a directory/folder.");
        }

        // Ensure the value passed to the formattingConvention parameter is
        // within range.
        if (formattingConvention != ToCSV.EXCEL_STYLE_ESCAPING
                && formattingConvention != ToCSV.UNIX_STYLE_ESCAPING) {
            throw new IllegalArgumentException("The value passed to the "
                    + "formattingConvention parameter is out of range: "
                    + formattingConvention + ", expecting one of "
                    + ToCSV.EXCEL_STYLE_ESCAPING + " or "
                    + ToCSV.UNIX_STYLE_ESCAPING);
        }

        // Copy the separator character and formatting convention into
        // instance fields for use by the other methods.
        this.separator = separator;
        this.formattingConvention = formattingConvention;

        // A folder yields its Excel files; a single file is converted as-is.
        final File[] filesList;
        if (source.isDirectory()) {
            filesList = source.listFiles(new ExcelFilenameFilter());
        } else {
            filesList = new File[]{source};
        }

        if (filesList != null) {
            for (File excelFile : filesList) {
                // Open the workbook.
                this.openWorkbook(excelFile);

                // Convert its contents into CSV format.
                this.convertToCSV();

                // Build the CSV file name from that of the Excel workbook by
                // replacing the file extension with .csv.
                // FIX: guard against a source file name with no extension;
                // substring(0, -1) would previously have thrown a
                // StringIndexOutOfBoundsException.
                String name = excelFile.getName();
                int dotIndex = name.lastIndexOf('.');
                String destinationFilename =
                        (dotIndex < 0 ? name : name.substring(0, dotIndex))
                        + ToCSV.CSV_FILE_EXTENSION;

                // Save the CSV file into the specified directory.
                this.saveCSVFile(new File(destination, destinationFilename));
            }
        }
    }

    /**
     * Open an Excel workbook ready for conversion. The workbook may be in
     * either binary (.xls) or SpreadsheetML (.xlsx) format.
     *
     * @param file handle to a valid Excel workbook.
     * @throws java.io.FileNotFoundException if the file cannot be located.
     * @throws java.io.IOException if a problem occurs in the file system.
     */
    private void openWorkbook(File file) throws FileNotFoundException, IOException {
        System.out.println("Opening workbook [" + file.getName() + "]");
        try (FileInputStream fis = new FileInputStream(file)) {

            // Open the workbook and then create the FormulaEvaluator and
            // DataFormatter instances that will be needed to, respectively,
            // force evaluation of formulae found in cells and create a
            // formatted String encapsulating the cell's contents.
            this.workbook = WorkbookFactory.create(fis);
            this.evaluator = this.workbook.getCreationHelper().createFormulaEvaluator();
            this.formatter = new DataFormatter(true);
        }
    }

    /**
     * Convert the contents of the currently opened workbook into rows of
     * formatted cell values held in {@link #csvData}.
     */
    private void convertToCSV() {
        this.csvData = new ArrayList<>();

        System.out.println("Converting files contents to CSV format.");

        // Iterate through every sheet in the workbook...
        int numSheets = this.workbook.getNumberOfSheets();
        for (int i = 0; i < numSheets; i++) {
            Sheet sheet = this.workbook.getSheetAt(i);
            if (sheet.getPhysicalNumberOfRows() > 0) {
                // ...and through every row on each populated sheet, starting
                // from the very first row (index 0) even if it is missing -
                // rowToCSV() copes with a null Row.
                int lastRowNum = sheet.getLastRowNum();
                for (int j = 0; j <= lastRowNum; j++) {
                    this.rowToCSV(sheet.getRow(j));
                }
            }
        }
    }

    /**
     * Save the data recovered from the Excel workbook as a CSV file.
     *
     * @param file handle referring to the CSV file to write.
     * @throws java.io.IOException to indicate an error occurred in the
     *         underlying file system.
     */
    private void saveCSVFile(File file) throws IOException {

        // Open a writer onto the CSV file.
        try (BufferedWriter bw = Files.newBufferedWriter(file.toPath(),
                StandardCharsets.ISO_8859_1)) {

            System.out.println("Saving the CSV file [" + file.getName() + "]");

            // Each element of csvData is one row; assemble each into a single
            // line, padding short rows with separators so the data matrix in
            // the resulting file is square (maxRowWidth fields per record).
            for (int i = 0; i < this.csvData.size(); i++) {
                StringBuilder buffer = new StringBuilder();

                ArrayList<String> line = this.csvData.get(i);
                for (int j = 0; j < this.maxRowWidth; j++) {
                    // A row shorter than maxRowWidth simply contributes empty
                    // fields, hence the bounds check.
                    if (line.size() > j) {
                        String csvLineElement = line.get(j);
                        if (csvLineElement != null) {
                            buffer.append(this.escapeEmbeddedCharacters(csvLineElement));
                        }
                    }
                    if (j < (this.maxRowWidth - 1)) {
                        buffer.append(this.separator);
                    }
                }

                // Once the line is built, write it away to the CSV file.
                bw.write(buffer.toString().trim());

                // Condition the inclusion of new line characters so as to
                // avoid an additional, superfluous, new line at the end of
                // the file.
                if (i < (this.csvData.size() - 1)) {
                    bw.newLine();
                }
            }
        }
    }

    /**
     * Convert a row of cells into a line of data that can later be output to
     * the CSV file.
     *
     * @param row a row of cells recovered from an Excel workbook, or
     *        {@code null} for a missing (blank) row, which is recorded as an
     *        empty record.
     */
    private void rowToCSV(Row row) {
        ArrayList<String> csvLine = new ArrayList<>();

        // A null row means a blank row between populated rows; it simply
        // contributes an empty record.
        if (row != null) {

            // Row.getLastCellNum() returns the index of the right-most cell
            // PLUS ONE, so iterate with '<'.
            // FIX: the original loop used '<=', which appended a spurious
            // empty trailing field to every populated row.
            int lastCellNum = row.getLastCellNum();
            for (int i = 0; i < lastCellNum; i++) {
                Cell cell = row.getCell(i);
                if (cell == null) {
                    csvLine.add("");
                } else if (cell.getCellType() != CellType.FORMULA) {
                    csvLine.add(this.formatter.formatCellValue(cell));
                } else {
                    // Formulae are evaluated so the CSV holds the result, not
                    // the formula text.
                    csvLine.add(this.formatter.formatCellValue(cell, this.evaluator));
                }
            }

            // Track the widest row seen; saveCSVFile() uses this to keep the
            // CSV data matrix square.
            if (lastCellNum > this.maxRowWidth) {
                this.maxRowWidth = lastCellNum;
            }
        }
        this.csvData.add(csvLine);
    }

    /**
     * Escape any embedded separator, EOL or speech mark characters in a field
     * according to the selected formatting convention.
     * <p>
     * Excel convention: each embedded speech mark (") is doubled and the whole
     * field surrounded with speech marks, so <em>"Hello" he said</em> becomes
     * <em>"""Hello"" he said"</em>; a field containing the separator or an EOL
     * character is likewise surrounded with speech marks (the surrounding
     * marks added for embedded quotes already cover any embedded separator,
     * so no second check is needed in that branch).
     * </p><p>
     * UNIX convention: each embedded separator or EOL character is preceded by
     * a single backslash.
     * </p>
     *
     * @param field the formatted contents of a cell on an Excel worksheet.
     * @return the field with any embedded separator, EOL or speech mark
     *         characters correctly escaped.
     */
    private String escapeEmbeddedCharacters(String field) {
        StringBuilder buffer;

        if (this.formattingConvention == ToCSV.EXCEL_STYLE_ESCAPING) {
            if (field.contains("\"")) {
                // FIX: Excel's convention doubles each speech mark ("" not
                // \"\"); the original replacement emitted literal
                // backslash-quote pairs.
                buffer = new StringBuilder(field.replace("\"", "\"\""));
                buffer.insert(0, "\"");
                buffer.append("\"");
            } else {
                // A field containing the separator or an EOL character is
                // escaped by surrounding it with speech marks.
                buffer = new StringBuilder(field);
                if ((buffer.indexOf(this.separator)) > -1
                        || (buffer.indexOf("\n")) > -1) {
                    buffer.insert(0, "\"");
                    buffer.append("\"");
                }
            }
            return buffer.toString().trim();
        }

        // UNIX convention: precede each occurrence of the separator or EOL
        // character with a single backslash.
        // FIX: use the literal replace() - the original used replaceAll(),
        // which treats a user-supplied separator such as "|" or "." as a
        // regular expression.
        if (field.contains(this.separator)) {
            field = field.replace(this.separator, "\\" + this.separator);
        }
        if (field.contains("\n")) {
            // FIX: a SINGLE backslash precedes the EOL character (the
            // original replacement inserted two).
            field = field.replace("\n", "\\\n");
        }
        return field;
    }

    /**
     * Demonstrates how to use the class from the command line.
     * <p>
     * Two, three or four arguments are expected: the source file/folder, the
     * destination folder, optionally the field separator (default comma) and
     * optionally the formatting convention (0 for Excel, 1 for UNIX). Note
     * that CSV file names are derived from the Excel file names, so Test.xls
     * and Test.xlsx in the same source folder would produce CSV files that
     * overwrite one another.
     * </p>
     *
     * @param args the command-line arguments described above.
     */
    public static void main(String[] args) {
        boolean converted = true;
        long startTime = System.currentTimeMillis();

        try {
            ToCSV converter = new ToCSV();
            if (args.length == 2) {
                // Just the Source File/Folder and Destination Folder were
                // passed to the main method.
                converter.convertExcelToCSV(args[0], args[1]);
            } else if (args.length == 3) {
                // The Source File/Folder, Destination Folder and Separator
                // were passed to the main method.
                converter.convertExcelToCSV(args[0], args[1], args[2]);
            } else if (args.length == 4) {
                // The Source File/Folder, Destination Folder, Separator and
                // Formatting Convention were passed to the main method.
                converter.convertExcelToCSV(args[0], args[1],
                        args[2], Integer.parseInt(args[3]));
            } else {
                // None or more than four parameters were passed so display
                // a Usage message.
                System.out.println("Usage: java ToCSV [Source File/Folder] "
                        + "[Destination Folder] [Separator] [Formatting Convention]\n"
                        + "\tSource File/Folder\tThis argument should contain the name of and\n"
                        + "\t\t\t\tpath to either a single Excel workbook or a\n"
                        + "\t\t\t\tfolder containing one or more Excel workbooks.\n"
                        + "\tDestination Folder\tThe name of and path to the folder that the\n"
                        + "\t\t\t\tCSV files should be written out into. The\n"
                        + "\t\t\t\tfolder must exist before running the ToCSV\n"
                        + "\t\t\t\tcode as it will not check for or create it.\n"
                        + "\tSeparator\t\tOptional. The character or characters that\n"
                        + "\t\t\t\tshould be used to separate fields in the CSV\n"
                        + "\t\t\t\trecord. If no value is passed then the comma\n"
                        + "\t\t\t\twill be assumed.\n"
                        + "\tFormatting Convention\tOptional. This argument can take one of two\n"
                        + "\t\t\t\tvalues. Passing 0 (zero) will result in a CSV\n"
                        + "\t\t\t\tfile that obeys Excel's formatting conventions\n"
                        + "\t\t\t\twhilst passing 1 (one) will result in a file\n"
                        + "\t\t\t\tthat obeys UNIX formatting conventions. If no\n"
                        + "\t\t\t\tvalue is passed, then the CSV file produced\n"
                        + "\t\t\t\twill obey Excel's formatting conventions.");
                converted = false;
            }
        }
        // A wide catch clause is acceptable here only because this is an
        // example program with no realistic recovery path; production code
        // should catch the specific exception types instead.
        catch (Exception ex) {
            LOG.atWarn().withThrowable(ex).log("Unexpected exception");
            converted = false;
        }

        if (converted) {
            System.out.println("Conversion took "
                    + ((System.currentTimeMillis() - startTime) / 1000) + " seconds");
        }
    }

    /**
     * Controls the files returned by a call to listFiles() when made on an
     * instance of the File class referring to a folder/directory: only names
     * ending with '.xls' or '.xlsx' are accepted. This could be parameterised
     * in the future to accept an arbitrary list of extensions.
     */
    static class ExcelFilenameFilter implements FilenameFilter {

        /**
         * Determine whether a file should be included in the array returned
         * by listFiles().
         *
         * @param file handle referring to the folder/directory containing the
         *        file.
         * @param name the name of the file.
         * @return true if the name ends with '.xls' or '.xlsx', false
         *         otherwise.
         */
        @Override
        public boolean accept(File file, String name) {
            return (name.endsWith(".xls") || name.endsWith(".xlsx"));
        }
    }
}
googleapis/google-cloud-java
37,051
java-telcoautomation/proto-google-cloud-telcoautomation-v1/src/main/java/com/google/cloud/telcoautomation/v1/ListDeploymentsRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/telcoautomation/v1/telcoautomation.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.telcoautomation.v1; /** * * * <pre> * Request object for `ListDeployments`. * </pre> * * Protobuf type {@code google.cloud.telcoautomation.v1.ListDeploymentsRequest} */ public final class ListDeploymentsRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.telcoautomation.v1.ListDeploymentsRequest) ListDeploymentsRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListDeploymentsRequest.newBuilder() to construct. 
private ListDeploymentsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListDeploymentsRequest() { parent_ = ""; filter_ = ""; pageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListDeploymentsRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.telcoautomation.v1.TelcoautomationProto .internal_static_google_cloud_telcoautomation_v1_ListDeploymentsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.telcoautomation.v1.TelcoautomationProto .internal_static_google_cloud_telcoautomation_v1_ListDeploymentsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.telcoautomation.v1.ListDeploymentsRequest.class, com.google.cloud.telcoautomation.v1.ListDeploymentsRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The name of parent orchestration cluster resource. * Format should be - * "projects/{project_id}/locations/{location_name}/orchestrationClusters/{orchestration_cluster}". * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The name of parent orchestration cluster resource. 
* Format should be - * "projects/{project_id}/locations/{location_name}/orchestrationClusters/{orchestration_cluster}". * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FILTER_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object filter_ = ""; /** * * * <pre> * Optional. Filtering only supports equality on deployment state. * It should be in the form: "state = DRAFT". `OR` operator can be used to * get response for multiple states. e.g. "state = DRAFT OR state = APPLIED". * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The filter. */ @java.lang.Override public java.lang.String getFilter() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } } /** * * * <pre> * Optional. Filtering only supports equality on deployment state. * It should be in the form: "state = DRAFT". `OR` operator can be used to * get response for multiple states. e.g. "state = DRAFT OR state = APPLIED". * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for filter. 
*/ @java.lang.Override public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 2; private int pageSize_ = 0; /** * * * <pre> * Optional. The maximum number of deployments to return per page. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. The page token, received from a previous ListDeployments call. * It can be provided to retrieve the subsequent page. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * Optional. The page token, received from a previous ListDeployments call. * It can be provided to retrieve the subsequent page. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (pageSize_ != 0) { output.writeInt32(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; 
return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.telcoautomation.v1.ListDeploymentsRequest)) { return super.equals(obj); } com.google.cloud.telcoautomation.v1.ListDeploymentsRequest other = (com.google.cloud.telcoautomation.v1.ListDeploymentsRequest) obj; if (!getParent().equals(other.getParent())) return false; if (!getFilter().equals(other.getFilter())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + FILTER_FIELD_NUMBER; hash = (53 * hash) + getFilter().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.telcoautomation.v1.ListDeploymentsRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.telcoautomation.v1.ListDeploymentsRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.telcoautomation.v1.ListDeploymentsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } 
public static com.google.cloud.telcoautomation.v1.ListDeploymentsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.telcoautomation.v1.ListDeploymentsRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.telcoautomation.v1.ListDeploymentsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.telcoautomation.v1.ListDeploymentsRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.telcoautomation.v1.ListDeploymentsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.telcoautomation.v1.ListDeploymentsRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.telcoautomation.v1.ListDeploymentsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.telcoautomation.v1.ListDeploymentsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws 
java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.telcoautomation.v1.ListDeploymentsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.telcoautomation.v1.ListDeploymentsRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request object for `ListDeployments`. 
* </pre> * * Protobuf type {@code google.cloud.telcoautomation.v1.ListDeploymentsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.telcoautomation.v1.ListDeploymentsRequest) com.google.cloud.telcoautomation.v1.ListDeploymentsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.telcoautomation.v1.TelcoautomationProto .internal_static_google_cloud_telcoautomation_v1_ListDeploymentsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.telcoautomation.v1.TelcoautomationProto .internal_static_google_cloud_telcoautomation_v1_ListDeploymentsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.telcoautomation.v1.ListDeploymentsRequest.class, com.google.cloud.telcoautomation.v1.ListDeploymentsRequest.Builder.class); } // Construct using com.google.cloud.telcoautomation.v1.ListDeploymentsRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; filter_ = ""; pageSize_ = 0; pageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.telcoautomation.v1.TelcoautomationProto .internal_static_google_cloud_telcoautomation_v1_ListDeploymentsRequest_descriptor; } @java.lang.Override public com.google.cloud.telcoautomation.v1.ListDeploymentsRequest getDefaultInstanceForType() { return com.google.cloud.telcoautomation.v1.ListDeploymentsRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.telcoautomation.v1.ListDeploymentsRequest build() { 
com.google.cloud.telcoautomation.v1.ListDeploymentsRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.telcoautomation.v1.ListDeploymentsRequest buildPartial() { com.google.cloud.telcoautomation.v1.ListDeploymentsRequest result = new com.google.cloud.telcoautomation.v1.ListDeploymentsRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.telcoautomation.v1.ListDeploymentsRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.filter_ = filter_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageSize_ = pageSize_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.pageToken_ = pageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.telcoautomation.v1.ListDeploymentsRequest) { return 
mergeFrom((com.google.cloud.telcoautomation.v1.ListDeploymentsRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.telcoautomation.v1.ListDeploymentsRequest other) { if (other == com.google.cloud.telcoautomation.v1.ListDeploymentsRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getFilter().isEmpty()) { filter_ = other.filter_; bitField0_ |= 0x00000002; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { pageSize_ = input.readInt32(); bitField0_ |= 0x00000004; break; } // case 16 case 26: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 26 case 34: { filter_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = 
""; /** * * * <pre> * Required. The name of parent orchestration cluster resource. * Format should be - * "projects/{project_id}/locations/{location_name}/orchestrationClusters/{orchestration_cluster}". * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The name of parent orchestration cluster resource. * Format should be - * "projects/{project_id}/locations/{location_name}/orchestrationClusters/{orchestration_cluster}". * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The name of parent orchestration cluster resource. * Format should be - * "projects/{project_id}/locations/{location_name}/orchestrationClusters/{orchestration_cluster}". * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The name of parent orchestration cluster resource. 
* Format should be - * "projects/{project_id}/locations/{location_name}/orchestrationClusters/{orchestration_cluster}". * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The name of parent orchestration cluster resource. * Format should be - * "projects/{project_id}/locations/{location_name}/orchestrationClusters/{orchestration_cluster}". * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object filter_ = ""; /** * * * <pre> * Optional. Filtering only supports equality on deployment state. * It should be in the form: "state = DRAFT". `OR` operator can be used to * get response for multiple states. e.g. "state = DRAFT OR state = APPLIED". * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The filter. */ public java.lang.String getFilter() { java.lang.Object ref = filter_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. Filtering only supports equality on deployment state. * It should be in the form: "state = DRAFT". `OR` operator can be used to * get response for multiple states. e.g. 
"state = DRAFT OR state = APPLIED". * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for filter. */ public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. Filtering only supports equality on deployment state. * It should be in the form: "state = DRAFT". `OR` operator can be used to * get response for multiple states. e.g. "state = DRAFT OR state = APPLIED". * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The filter to set. * @return This builder for chaining. */ public Builder setFilter(java.lang.String value) { if (value == null) { throw new NullPointerException(); } filter_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. Filtering only supports equality on deployment state. * It should be in the form: "state = DRAFT". `OR` operator can be used to * get response for multiple states. e.g. "state = DRAFT OR state = APPLIED". * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearFilter() { filter_ = getDefaultInstance().getFilter(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Optional. Filtering only supports equality on deployment state. * It should be in the form: "state = DRAFT". `OR` operator can be used to * get response for multiple states. e.g. "state = DRAFT OR state = APPLIED". * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for filter to set. * @return This builder for chaining. 
*/ public Builder setFilterBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); filter_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private int pageSize_; /** * * * <pre> * Optional. The maximum number of deployments to return per page. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * Optional. The maximum number of deployments to return per page. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. The maximum number of deployments to return per page. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000004); pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. The page token, received from a previous ListDeployments call. * It can be provided to retrieve the subsequent page. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. The page token, received from a previous ListDeployments call. * It can be provided to retrieve the subsequent page. 
* </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. The page token, received from a previous ListDeployments call. * It can be provided to retrieve the subsequent page. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageToken to set. * @return This builder for chaining. */ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Optional. The page token, received from a previous ListDeployments call. * It can be provided to retrieve the subsequent page. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * Optional. The page token, received from a previous ListDeployments call. * It can be provided to retrieve the subsequent page. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. 
*/ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.telcoautomation.v1.ListDeploymentsRequest) } // @@protoc_insertion_point(class_scope:google.cloud.telcoautomation.v1.ListDeploymentsRequest) private static final com.google.cloud.telcoautomation.v1.ListDeploymentsRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.telcoautomation.v1.ListDeploymentsRequest(); } public static com.google.cloud.telcoautomation.v1.ListDeploymentsRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListDeploymentsRequest> PARSER = new com.google.protobuf.AbstractParser<ListDeploymentsRequest>() { @java.lang.Override public ListDeploymentsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); 
} }; public static com.google.protobuf.Parser<ListDeploymentsRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListDeploymentsRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.telcoautomation.v1.ListDeploymentsRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,126
java-aiplatform/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/IndexServiceClientTest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.aiplatform.v1; import static com.google.cloud.aiplatform.v1.IndexServiceClient.ListIndexesPagedResponse; import static com.google.cloud.aiplatform.v1.IndexServiceClient.ListLocationsPagedResponse; import com.google.api.gax.core.NoCredentialsProvider; import com.google.api.gax.grpc.GaxGrpcProperties; import com.google.api.gax.grpc.testing.LocalChannelProvider; import com.google.api.gax.grpc.testing.MockGrpcService; import com.google.api.gax.grpc.testing.MockServiceHelper; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.InvalidArgumentException; import com.google.api.gax.rpc.StatusCode; import com.google.cloud.location.GetLocationRequest; import com.google.cloud.location.ListLocationsRequest; import com.google.cloud.location.ListLocationsResponse; import com.google.cloud.location.Location; import com.google.common.collect.Lists; import com.google.iam.v1.AuditConfig; import com.google.iam.v1.Binding; import com.google.iam.v1.GetIamPolicyRequest; import com.google.iam.v1.GetPolicyOptions; import com.google.iam.v1.Policy; import com.google.iam.v1.SetIamPolicyRequest; import com.google.iam.v1.TestIamPermissionsRequest; import com.google.iam.v1.TestIamPermissionsResponse; import com.google.longrunning.Operation; import com.google.protobuf.AbstractMessage; import com.google.protobuf.Any; import com.google.protobuf.ByteString; import 
com.google.protobuf.Empty; import com.google.protobuf.FieldMask; import com.google.protobuf.Timestamp; import com.google.protobuf.Value; import io.grpc.StatusRuntimeException; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.UUID; import java.util.concurrent.ExecutionException; import javax.annotation.Generated; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; @Generated("by gapic-generator-java") public class IndexServiceClientTest { private static MockIAMPolicy mockIAMPolicy; private static MockIndexService mockIndexService; private static MockLocations mockLocations; private static MockServiceHelper mockServiceHelper; private LocalChannelProvider channelProvider; private IndexServiceClient client; @BeforeClass public static void startStaticServer() { mockIndexService = new MockIndexService(); mockLocations = new MockLocations(); mockIAMPolicy = new MockIAMPolicy(); mockServiceHelper = new MockServiceHelper( UUID.randomUUID().toString(), Arrays.<MockGrpcService>asList(mockIndexService, mockLocations, mockIAMPolicy)); mockServiceHelper.start(); } @AfterClass public static void stopServer() { mockServiceHelper.stop(); } @Before public void setUp() throws IOException { mockServiceHelper.reset(); channelProvider = mockServiceHelper.createChannelProvider(); IndexServiceSettings settings = IndexServiceSettings.newBuilder() .setTransportChannelProvider(channelProvider) .setCredentialsProvider(NoCredentialsProvider.create()) .build(); client = IndexServiceClient.create(settings); } @After public void tearDown() throws Exception { client.close(); } @Test public void createIndexTest() throws Exception { Index expectedResponse = Index.newBuilder() .setName(IndexName.of("[PROJECT]", "[LOCATION]", "[INDEX]").toString()) .setDisplayName("displayName1714148973") 
.setDescription("description-1724546052") .setMetadataSchemaUri("metadataSchemaUri781971868") .setMetadata(Value.newBuilder().setBoolValue(true).build()) .addAllDeployedIndexes(new ArrayList<DeployedIndexRef>()) .setEtag("etag3123477") .putAllLabels(new HashMap<String, String>()) .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .setIndexStats(IndexStats.newBuilder().build()) .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .setSatisfiesPzs(true) .setSatisfiesPzi(true) .build(); Operation resultOperation = Operation.newBuilder() .setName("createIndexTest") .setDone(true) .setResponse(Any.pack(expectedResponse)) .build(); mockIndexService.addResponse(resultOperation); LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); Index index = Index.newBuilder().build(); Index actualResponse = client.createIndexAsync(parent, index).get(); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockIndexService.getRequests(); Assert.assertEquals(1, actualRequests.size()); CreateIndexRequest actualRequest = ((CreateIndexRequest) actualRequests.get(0)); Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertEquals(index, actualRequest.getIndex()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void createIndexExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockIndexService.addException(exception); try { LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); Index index = Index.newBuilder().build(); client.createIndexAsync(parent, index).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); InvalidArgumentException apiException = 
((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test public void createIndexTest2() throws Exception { Index expectedResponse = Index.newBuilder() .setName(IndexName.of("[PROJECT]", "[LOCATION]", "[INDEX]").toString()) .setDisplayName("displayName1714148973") .setDescription("description-1724546052") .setMetadataSchemaUri("metadataSchemaUri781971868") .setMetadata(Value.newBuilder().setBoolValue(true).build()) .addAllDeployedIndexes(new ArrayList<DeployedIndexRef>()) .setEtag("etag3123477") .putAllLabels(new HashMap<String, String>()) .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .setIndexStats(IndexStats.newBuilder().build()) .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .setSatisfiesPzs(true) .setSatisfiesPzi(true) .build(); Operation resultOperation = Operation.newBuilder() .setName("createIndexTest") .setDone(true) .setResponse(Any.pack(expectedResponse)) .build(); mockIndexService.addResponse(resultOperation); String parent = "parent-995424086"; Index index = Index.newBuilder().build(); Index actualResponse = client.createIndexAsync(parent, index).get(); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockIndexService.getRequests(); Assert.assertEquals(1, actualRequests.size()); CreateIndexRequest actualRequest = ((CreateIndexRequest) actualRequests.get(0)); Assert.assertEquals(parent, actualRequest.getParent()); Assert.assertEquals(index, actualRequest.getIndex()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void createIndexExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockIndexService.addException(exception); try { String parent = 
"parent-995424086"; Index index = Index.newBuilder().build(); client.createIndexAsync(parent, index).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test public void getIndexTest() throws Exception { Index expectedResponse = Index.newBuilder() .setName(IndexName.of("[PROJECT]", "[LOCATION]", "[INDEX]").toString()) .setDisplayName("displayName1714148973") .setDescription("description-1724546052") .setMetadataSchemaUri("metadataSchemaUri781971868") .setMetadata(Value.newBuilder().setBoolValue(true).build()) .addAllDeployedIndexes(new ArrayList<DeployedIndexRef>()) .setEtag("etag3123477") .putAllLabels(new HashMap<String, String>()) .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .setIndexStats(IndexStats.newBuilder().build()) .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .setSatisfiesPzs(true) .setSatisfiesPzi(true) .build(); mockIndexService.addResponse(expectedResponse); IndexName name = IndexName.of("[PROJECT]", "[LOCATION]", "[INDEX]"); Index actualResponse = client.getIndex(name); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockIndexService.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetIndexRequest actualRequest = ((GetIndexRequest) actualRequests.get(0)); Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getIndexExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); 
mockIndexService.addException(exception); try { IndexName name = IndexName.of("[PROJECT]", "[LOCATION]", "[INDEX]"); client.getIndex(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void getIndexTest2() throws Exception { Index expectedResponse = Index.newBuilder() .setName(IndexName.of("[PROJECT]", "[LOCATION]", "[INDEX]").toString()) .setDisplayName("displayName1714148973") .setDescription("description-1724546052") .setMetadataSchemaUri("metadataSchemaUri781971868") .setMetadata(Value.newBuilder().setBoolValue(true).build()) .addAllDeployedIndexes(new ArrayList<DeployedIndexRef>()) .setEtag("etag3123477") .putAllLabels(new HashMap<String, String>()) .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .setIndexStats(IndexStats.newBuilder().build()) .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .setSatisfiesPzs(true) .setSatisfiesPzi(true) .build(); mockIndexService.addResponse(expectedResponse); String name = "name3373707"; Index actualResponse = client.getIndex(name); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockIndexService.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetIndexRequest actualRequest = ((GetIndexRequest) actualRequests.get(0)); Assert.assertEquals(name, actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getIndexExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockIndexService.addException(exception); try { String name = "name3373707"; client.getIndex(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void listIndexesTest() throws Exception { Index responsesElement = Index.newBuilder().build(); ListIndexesResponse expectedResponse = ListIndexesResponse.newBuilder() .setNextPageToken("") .addAllIndexes(Arrays.asList(responsesElement)) .build(); mockIndexService.addResponse(expectedResponse); LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); ListIndexesPagedResponse pagedListResponse = client.listIndexes(parent); List<Index> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getIndexesList().get(0), resources.get(0)); List<AbstractMessage> actualRequests = mockIndexService.getRequests(); Assert.assertEquals(1, actualRequests.size()); ListIndexesRequest actualRequest = ((ListIndexesRequest) actualRequests.get(0)); Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void listIndexesExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockIndexService.addException(exception); try { LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); client.listIndexes(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void listIndexesTest2() throws Exception { Index responsesElement = Index.newBuilder().build(); ListIndexesResponse expectedResponse = ListIndexesResponse.newBuilder() .setNextPageToken("") .addAllIndexes(Arrays.asList(responsesElement)) .build(); mockIndexService.addResponse(expectedResponse); String parent = "parent-995424086"; ListIndexesPagedResponse pagedListResponse = client.listIndexes(parent); List<Index> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getIndexesList().get(0), resources.get(0)); List<AbstractMessage> actualRequests = mockIndexService.getRequests(); Assert.assertEquals(1, actualRequests.size()); ListIndexesRequest actualRequest = ((ListIndexesRequest) actualRequests.get(0)); Assert.assertEquals(parent, actualRequest.getParent()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void listIndexesExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockIndexService.addException(exception); try { String parent = "parent-995424086"; client.listIndexes(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void updateIndexTest() throws Exception { Index expectedResponse = Index.newBuilder() .setName(IndexName.of("[PROJECT]", "[LOCATION]", "[INDEX]").toString()) .setDisplayName("displayName1714148973") .setDescription("description-1724546052") .setMetadataSchemaUri("metadataSchemaUri781971868") .setMetadata(Value.newBuilder().setBoolValue(true).build()) .addAllDeployedIndexes(new ArrayList<DeployedIndexRef>()) .setEtag("etag3123477") .putAllLabels(new HashMap<String, String>()) .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .setIndexStats(IndexStats.newBuilder().build()) .setEncryptionSpec(EncryptionSpec.newBuilder().build()) .setSatisfiesPzs(true) .setSatisfiesPzi(true) .build(); Operation resultOperation = Operation.newBuilder() .setName("updateIndexTest") .setDone(true) .setResponse(Any.pack(expectedResponse)) .build(); mockIndexService.addResponse(resultOperation); Index index = Index.newBuilder().build(); FieldMask updateMask = FieldMask.newBuilder().build(); Index actualResponse = client.updateIndexAsync(index, updateMask).get(); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockIndexService.getRequests(); Assert.assertEquals(1, actualRequests.size()); UpdateIndexRequest actualRequest = ((UpdateIndexRequest) actualRequests.get(0)); Assert.assertEquals(index, actualRequest.getIndex()); Assert.assertEquals(updateMask, actualRequest.getUpdateMask()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void updateIndexExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockIndexService.addException(exception); try { Index index = Index.newBuilder().build(); FieldMask updateMask = FieldMask.newBuilder().build(); client.updateIndexAsync(index, 
updateMask).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test public void deleteIndexTest() throws Exception { Empty expectedResponse = Empty.newBuilder().build(); Operation resultOperation = Operation.newBuilder() .setName("deleteIndexTest") .setDone(true) .setResponse(Any.pack(expectedResponse)) .build(); mockIndexService.addResponse(resultOperation); IndexName name = IndexName.of("[PROJECT]", "[LOCATION]", "[INDEX]"); client.deleteIndexAsync(name).get(); List<AbstractMessage> actualRequests = mockIndexService.getRequests(); Assert.assertEquals(1, actualRequests.size()); DeleteIndexRequest actualRequest = ((DeleteIndexRequest) actualRequests.get(0)); Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void deleteIndexExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockIndexService.addException(exception); try { IndexName name = IndexName.of("[PROJECT]", "[LOCATION]", "[INDEX]"); client.deleteIndexAsync(name).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test public void deleteIndexTest2() throws Exception { Empty expectedResponse = Empty.newBuilder().build(); Operation resultOperation = Operation.newBuilder() 
.setName("deleteIndexTest") .setDone(true) .setResponse(Any.pack(expectedResponse)) .build(); mockIndexService.addResponse(resultOperation); String name = "name3373707"; client.deleteIndexAsync(name).get(); List<AbstractMessage> actualRequests = mockIndexService.getRequests(); Assert.assertEquals(1, actualRequests.size()); DeleteIndexRequest actualRequest = ((DeleteIndexRequest) actualRequests.get(0)); Assert.assertEquals(name, actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void deleteIndexExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockIndexService.addException(exception); try { String name = "name3373707"; client.deleteIndexAsync(name).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test public void upsertDatapointsTest() throws Exception { UpsertDatapointsResponse expectedResponse = UpsertDatapointsResponse.newBuilder().build(); mockIndexService.addResponse(expectedResponse); UpsertDatapointsRequest request = UpsertDatapointsRequest.newBuilder() .setIndex(IndexName.of("[PROJECT]", "[LOCATION]", "[INDEX]").toString()) .addAllDatapoints(new ArrayList<IndexDatapoint>()) .setUpdateMask(FieldMask.newBuilder().build()) .build(); UpsertDatapointsResponse actualResponse = client.upsertDatapoints(request); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockIndexService.getRequests(); Assert.assertEquals(1, actualRequests.size()); UpsertDatapointsRequest actualRequest = ((UpsertDatapointsRequest) 
actualRequests.get(0)); Assert.assertEquals(request.getIndex(), actualRequest.getIndex()); Assert.assertEquals(request.getDatapointsList(), actualRequest.getDatapointsList()); Assert.assertEquals(request.getUpdateMask(), actualRequest.getUpdateMask()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void upsertDatapointsExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockIndexService.addException(exception); try { UpsertDatapointsRequest request = UpsertDatapointsRequest.newBuilder() .setIndex(IndexName.of("[PROJECT]", "[LOCATION]", "[INDEX]").toString()) .addAllDatapoints(new ArrayList<IndexDatapoint>()) .setUpdateMask(FieldMask.newBuilder().build()) .build(); client.upsertDatapoints(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void removeDatapointsTest() throws Exception { RemoveDatapointsResponse expectedResponse = RemoveDatapointsResponse.newBuilder().build(); mockIndexService.addResponse(expectedResponse); RemoveDatapointsRequest request = RemoveDatapointsRequest.newBuilder() .setIndex(IndexName.of("[PROJECT]", "[LOCATION]", "[INDEX]").toString()) .addAllDatapointIds(new ArrayList<String>()) .build(); RemoveDatapointsResponse actualResponse = client.removeDatapoints(request); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockIndexService.getRequests(); Assert.assertEquals(1, actualRequests.size()); RemoveDatapointsRequest actualRequest = ((RemoveDatapointsRequest) actualRequests.get(0)); Assert.assertEquals(request.getIndex(), actualRequest.getIndex()); Assert.assertEquals(request.getDatapointIdsList(), actualRequest.getDatapointIdsList()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void removeDatapointsExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockIndexService.addException(exception); try { RemoveDatapointsRequest request = RemoveDatapointsRequest.newBuilder() .setIndex(IndexName.of("[PROJECT]", "[LOCATION]", "[INDEX]").toString()) .addAllDatapointIds(new ArrayList<String>()) .build(); client.removeDatapoints(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void listLocationsTest() throws Exception { Location responsesElement = Location.newBuilder().build(); ListLocationsResponse expectedResponse = ListLocationsResponse.newBuilder() .setNextPageToken("") .addAllLocations(Arrays.asList(responsesElement)) .build(); mockLocations.addResponse(expectedResponse); ListLocationsRequest request = ListLocationsRequest.newBuilder() .setName("name3373707") .setFilter("filter-1274492040") .setPageSize(883849137) .setPageToken("pageToken873572522") .build(); ListLocationsPagedResponse pagedListResponse = client.listLocations(request); List<Location> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getLocationsList().get(0), resources.get(0)); List<AbstractMessage> actualRequests = mockLocations.getRequests(); Assert.assertEquals(1, actualRequests.size()); ListLocationsRequest actualRequest = ((ListLocationsRequest) actualRequests.get(0)); Assert.assertEquals(request.getName(), actualRequest.getName()); Assert.assertEquals(request.getFilter(), actualRequest.getFilter()); Assert.assertEquals(request.getPageSize(), actualRequest.getPageSize()); Assert.assertEquals(request.getPageToken(), actualRequest.getPageToken()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void listLocationsExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockLocations.addException(exception); try { ListLocationsRequest request = ListLocationsRequest.newBuilder() .setName("name3373707") .setFilter("filter-1274492040") .setPageSize(883849137) .setPageToken("pageToken873572522") .build(); client.listLocations(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void getLocationTest() throws Exception { Location expectedResponse = Location.newBuilder() .setName("name3373707") .setLocationId("locationId1541836720") .setDisplayName("displayName1714148973") .putAllLabels(new HashMap<String, String>()) .setMetadata(Any.newBuilder().build()) .build(); mockLocations.addResponse(expectedResponse); GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build(); Location actualResponse = client.getLocation(request); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockLocations.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetLocationRequest actualRequest = ((GetLocationRequest) actualRequests.get(0)); Assert.assertEquals(request.getName(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getLocationExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockLocations.addException(exception); try { GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build(); client.getLocation(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void setIamPolicyTest() throws Exception { Policy expectedResponse = Policy.newBuilder() .setVersion(351608024) .addAllBindings(new ArrayList<Binding>()) .addAllAuditConfigs(new ArrayList<AuditConfig>()) .setEtag(ByteString.EMPTY) .build(); mockIAMPolicy.addResponse(expectedResponse); SetIamPolicyRequest request = SetIamPolicyRequest.newBuilder() .setResource( EndpointName.ofProjectLocationEndpointName("[PROJECT]", "[LOCATION]", "[ENDPOINT]") .toString()) .setPolicy(Policy.newBuilder().build()) .setUpdateMask(FieldMask.newBuilder().build()) .build(); Policy actualResponse = client.setIamPolicy(request); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockIAMPolicy.getRequests(); Assert.assertEquals(1, actualRequests.size()); SetIamPolicyRequest actualRequest = ((SetIamPolicyRequest) actualRequests.get(0)); Assert.assertEquals(request.getResource(), actualRequest.getResource()); Assert.assertEquals(request.getPolicy(), actualRequest.getPolicy()); Assert.assertEquals(request.getUpdateMask(), actualRequest.getUpdateMask()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void setIamPolicyExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockIAMPolicy.addException(exception); try { SetIamPolicyRequest request = SetIamPolicyRequest.newBuilder() .setResource( EndpointName.ofProjectLocationEndpointName( "[PROJECT]", "[LOCATION]", "[ENDPOINT]") .toString()) .setPolicy(Policy.newBuilder().build()) .setUpdateMask(FieldMask.newBuilder().build()) .build(); client.setIamPolicy(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void getIamPolicyTest() throws Exception { Policy expectedResponse = Policy.newBuilder() .setVersion(351608024) .addAllBindings(new ArrayList<Binding>()) .addAllAuditConfigs(new ArrayList<AuditConfig>()) .setEtag(ByteString.EMPTY) .build(); mockIAMPolicy.addResponse(expectedResponse); GetIamPolicyRequest request = GetIamPolicyRequest.newBuilder() .setResource( EndpointName.ofProjectLocationEndpointName("[PROJECT]", "[LOCATION]", "[ENDPOINT]") .toString()) .setOptions(GetPolicyOptions.newBuilder().build()) .build(); Policy actualResponse = client.getIamPolicy(request); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockIAMPolicy.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetIamPolicyRequest actualRequest = ((GetIamPolicyRequest) actualRequests.get(0)); Assert.assertEquals(request.getResource(), actualRequest.getResource()); Assert.assertEquals(request.getOptions(), actualRequest.getOptions()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getIamPolicyExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockIAMPolicy.addException(exception); try { GetIamPolicyRequest request = GetIamPolicyRequest.newBuilder() .setResource( EndpointName.ofProjectLocationEndpointName( "[PROJECT]", "[LOCATION]", "[ENDPOINT]") .toString()) .setOptions(GetPolicyOptions.newBuilder().build()) .build(); client.getIamPolicy(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void testIamPermissionsTest() throws Exception { TestIamPermissionsResponse expectedResponse = TestIamPermissionsResponse.newBuilder().addAllPermissions(new ArrayList<String>()).build(); mockIAMPolicy.addResponse(expectedResponse); TestIamPermissionsRequest request = TestIamPermissionsRequest.newBuilder() .setResource( EndpointName.ofProjectLocationEndpointName("[PROJECT]", "[LOCATION]", "[ENDPOINT]") .toString()) .addAllPermissions(new ArrayList<String>()) .build(); TestIamPermissionsResponse actualResponse = client.testIamPermissions(request); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockIAMPolicy.getRequests(); Assert.assertEquals(1, actualRequests.size()); TestIamPermissionsRequest actualRequest = ((TestIamPermissionsRequest) actualRequests.get(0)); Assert.assertEquals(request.getResource(), actualRequest.getResource()); Assert.assertEquals(request.getPermissionsList(), actualRequest.getPermissionsList()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void testIamPermissionsExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockIAMPolicy.addException(exception); try { TestIamPermissionsRequest request = TestIamPermissionsRequest.newBuilder() .setResource( EndpointName.ofProjectLocationEndpointName( "[PROJECT]", "[LOCATION]", "[ENDPOINT]") .toString()) .addAllPermissions(new ArrayList<String>()) .build(); client.testIamPermissions(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } }
googleapis/google-cloud-java
37,150
java-discoveryengine/proto-google-cloud-discoveryengine-v1beta/src/main/java/com/google/cloud/discoveryengine/v1beta/UpdateConversationRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/discoveryengine/v1beta/conversational_search_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.discoveryengine.v1beta; /** * * * <pre> * Request for UpdateConversation method. * </pre> * * Protobuf type {@code google.cloud.discoveryengine.v1beta.UpdateConversationRequest} */ public final class UpdateConversationRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1beta.UpdateConversationRequest) UpdateConversationRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateConversationRequest.newBuilder() to construct. 
private UpdateConversationRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateConversationRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateConversationRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.discoveryengine.v1beta.ConversationalSearchServiceProto .internal_static_google_cloud_discoveryengine_v1beta_UpdateConversationRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.discoveryengine.v1beta.ConversationalSearchServiceProto .internal_static_google_cloud_discoveryengine_v1beta_UpdateConversationRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest.class, com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest.Builder.class); } private int bitField0_; public static final int CONVERSATION_FIELD_NUMBER = 1; private com.google.cloud.discoveryengine.v1beta.Conversation conversation_; /** * * * <pre> * Required. The Conversation to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1beta.Conversation conversation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the conversation field is set. */ @java.lang.Override public boolean hasConversation() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The Conversation to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1beta.Conversation conversation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The conversation. */ @java.lang.Override public com.google.cloud.discoveryengine.v1beta.Conversation getConversation() { return conversation_ == null ? 
com.google.cloud.discoveryengine.v1beta.Conversation.getDefaultInstance() : conversation_; } /** * * * <pre> * Required. The Conversation to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1beta.Conversation conversation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.discoveryengine.v1beta.ConversationOrBuilder getConversationOrBuilder() { return conversation_ == null ? com.google.cloud.discoveryengine.v1beta.Conversation.getDefaultInstance() : conversation_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Indicates which fields in the provided * [Conversation][google.cloud.discoveryengine.v1beta.Conversation] to update. * The following are NOT supported: * * * [Conversation.name][google.cloud.discoveryengine.v1beta.Conversation.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Indicates which fields in the provided * [Conversation][google.cloud.discoveryengine.v1beta.Conversation] to update. * The following are NOT supported: * * * [Conversation.name][google.cloud.discoveryengine.v1beta.Conversation.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Indicates which fields in the provided * [Conversation][google.cloud.discoveryengine.v1beta.Conversation] to update. 
* The following are NOT supported: * * * [Conversation.name][google.cloud.discoveryengine.v1beta.Conversation.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getConversation()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getConversation()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest)) { return super.equals(obj); } com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest other = (com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest) obj; if (hasConversation() != other.hasConversation()) return false; if (hasConversation()) { if (!getConversation().equals(other.getConversation())) 
return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasConversation()) { hash = (37 * hash) + CONVERSATION_FIELD_NUMBER; hash = (53 * hash) + getConversation().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public 
Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request for UpdateConversation method. * </pre> * * Protobuf type {@code google.cloud.discoveryengine.v1beta.UpdateConversationRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1beta.UpdateConversationRequest) com.google.cloud.discoveryengine.v1beta.UpdateConversationRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.discoveryengine.v1beta.ConversationalSearchServiceProto .internal_static_google_cloud_discoveryengine_v1beta_UpdateConversationRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.discoveryengine.v1beta.ConversationalSearchServiceProto .internal_static_google_cloud_discoveryengine_v1beta_UpdateConversationRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest.class, com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest.Builder.class); } // Construct using // com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private 
Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getConversationFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; conversation_ = null; if (conversationBuilder_ != null) { conversationBuilder_.dispose(); conversationBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.discoveryengine.v1beta.ConversationalSearchServiceProto .internal_static_google_cloud_discoveryengine_v1beta_UpdateConversationRequest_descriptor; } @java.lang.Override public com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest getDefaultInstanceForType() { return com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest build() { com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest buildPartial() { com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest result = new com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.conversation_ = conversationBuilder_ 
== null ? conversation_ : conversationBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest) { return mergeFrom((com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest other) { if (other == com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest.getDefaultInstance()) return this; if (other.hasConversation()) { mergeConversation(other.getConversation()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { 
return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getConversationFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.discoveryengine.v1beta.Conversation conversation_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.discoveryengine.v1beta.Conversation, com.google.cloud.discoveryengine.v1beta.Conversation.Builder, com.google.cloud.discoveryengine.v1beta.ConversationOrBuilder> conversationBuilder_; /** * * * <pre> * Required. The Conversation to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1beta.Conversation conversation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the conversation field is set. */ public boolean hasConversation() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The Conversation to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1beta.Conversation conversation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The conversation. 
*/ public com.google.cloud.discoveryengine.v1beta.Conversation getConversation() { if (conversationBuilder_ == null) { return conversation_ == null ? com.google.cloud.discoveryengine.v1beta.Conversation.getDefaultInstance() : conversation_; } else { return conversationBuilder_.getMessage(); } } /** * * * <pre> * Required. The Conversation to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1beta.Conversation conversation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setConversation(com.google.cloud.discoveryengine.v1beta.Conversation value) { if (conversationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } conversation_ = value; } else { conversationBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The Conversation to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1beta.Conversation conversation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setConversation( com.google.cloud.discoveryengine.v1beta.Conversation.Builder builderForValue) { if (conversationBuilder_ == null) { conversation_ = builderForValue.build(); } else { conversationBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The Conversation to update. 
* </pre> * * <code> * .google.cloud.discoveryengine.v1beta.Conversation conversation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeConversation(com.google.cloud.discoveryengine.v1beta.Conversation value) { if (conversationBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && conversation_ != null && conversation_ != com.google.cloud.discoveryengine.v1beta.Conversation.getDefaultInstance()) { getConversationBuilder().mergeFrom(value); } else { conversation_ = value; } } else { conversationBuilder_.mergeFrom(value); } if (conversation_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The Conversation to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1beta.Conversation conversation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearConversation() { bitField0_ = (bitField0_ & ~0x00000001); conversation_ = null; if (conversationBuilder_ != null) { conversationBuilder_.dispose(); conversationBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The Conversation to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1beta.Conversation conversation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.discoveryengine.v1beta.Conversation.Builder getConversationBuilder() { bitField0_ |= 0x00000001; onChanged(); return getConversationFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The Conversation to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1beta.Conversation conversation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.discoveryengine.v1beta.ConversationOrBuilder getConversationOrBuilder() { if (conversationBuilder_ != null) { return conversationBuilder_.getMessageOrBuilder(); } else { return conversation_ == null ? 
com.google.cloud.discoveryengine.v1beta.Conversation.getDefaultInstance() : conversation_; } } /** * * * <pre> * Required. The Conversation to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1beta.Conversation conversation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.discoveryengine.v1beta.Conversation, com.google.cloud.discoveryengine.v1beta.Conversation.Builder, com.google.cloud.discoveryengine.v1beta.ConversationOrBuilder> getConversationFieldBuilder() { if (conversationBuilder_ == null) { conversationBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.discoveryengine.v1beta.Conversation, com.google.cloud.discoveryengine.v1beta.Conversation.Builder, com.google.cloud.discoveryengine.v1beta.ConversationOrBuilder>( getConversation(), getParentForChildren(), isClean()); conversation_ = null; } return conversationBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Indicates which fields in the provided * [Conversation][google.cloud.discoveryengine.v1beta.Conversation] to update. * The following are NOT supported: * * * [Conversation.name][google.cloud.discoveryengine.v1beta.Conversation.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Indicates which fields in the provided * [Conversation][google.cloud.discoveryengine.v1beta.Conversation] to update. 
* The following are NOT supported: * * * [Conversation.name][google.cloud.discoveryengine.v1beta.Conversation.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Indicates which fields in the provided * [Conversation][google.cloud.discoveryengine.v1beta.Conversation] to update. * The following are NOT supported: * * * [Conversation.name][google.cloud.discoveryengine.v1beta.Conversation.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Indicates which fields in the provided * [Conversation][google.cloud.discoveryengine.v1beta.Conversation] to update. * The following are NOT supported: * * * [Conversation.name][google.cloud.discoveryengine.v1beta.Conversation.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Indicates which fields in the provided * [Conversation][google.cloud.discoveryengine.v1beta.Conversation] to update. 
* The following are NOT supported: * * * [Conversation.name][google.cloud.discoveryengine.v1beta.Conversation.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Indicates which fields in the provided * [Conversation][google.cloud.discoveryengine.v1beta.Conversation] to update. * The following are NOT supported: * * * [Conversation.name][google.cloud.discoveryengine.v1beta.Conversation.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Indicates which fields in the provided * [Conversation][google.cloud.discoveryengine.v1beta.Conversation] to update. * The following are NOT supported: * * * [Conversation.name][google.cloud.discoveryengine.v1beta.Conversation.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Indicates which fields in the provided * [Conversation][google.cloud.discoveryengine.v1beta.Conversation] to update. 
* The following are NOT supported: * * * [Conversation.name][google.cloud.discoveryengine.v1beta.Conversation.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Indicates which fields in the provided * [Conversation][google.cloud.discoveryengine.v1beta.Conversation] to update. * The following are NOT supported: * * * [Conversation.name][google.cloud.discoveryengine.v1beta.Conversation.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1beta.UpdateConversationRequest) } // @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1beta.UpdateConversationRequest) private static final 
com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest(); } public static com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateConversationRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateConversationRequest>() { @java.lang.Override public UpdateConversationRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateConversationRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateConversationRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.discoveryengine.v1beta.UpdateConversationRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,114
java-document-ai/proto-google-cloud-document-ai-v1beta3/src/main/java/com/google/cloud/documentai/v1beta3/ListEvaluationsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/documentai/v1beta3/document_processor_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.documentai.v1beta3; /** * * * <pre> * The response from `ListEvaluations`. * </pre> * * Protobuf type {@code google.cloud.documentai.v1beta3.ListEvaluationsResponse} */ public final class ListEvaluationsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.documentai.v1beta3.ListEvaluationsResponse) ListEvaluationsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListEvaluationsResponse.newBuilder() to construct. 
private ListEvaluationsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListEvaluationsResponse() { evaluations_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListEvaluationsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.documentai.v1beta3.DocumentAiProcessorService .internal_static_google_cloud_documentai_v1beta3_ListEvaluationsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.documentai.v1beta3.DocumentAiProcessorService .internal_static_google_cloud_documentai_v1beta3_ListEvaluationsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.documentai.v1beta3.ListEvaluationsResponse.class, com.google.cloud.documentai.v1beta3.ListEvaluationsResponse.Builder.class); } public static final int EVALUATIONS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.documentai.v1beta3.Evaluation> evaluations_; /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1beta3.Evaluation evaluations = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.documentai.v1beta3.Evaluation> getEvaluationsList() { return evaluations_; } /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1beta3.Evaluation evaluations = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.documentai.v1beta3.EvaluationOrBuilder> getEvaluationsOrBuilderList() { return evaluations_; } /** * * * <pre> * The evaluations requested. 
* </pre> * * <code>repeated .google.cloud.documentai.v1beta3.Evaluation evaluations = 1;</code> */ @java.lang.Override public int getEvaluationsCount() { return evaluations_.size(); } /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1beta3.Evaluation evaluations = 1;</code> */ @java.lang.Override public com.google.cloud.documentai.v1beta3.Evaluation getEvaluations(int index) { return evaluations_.get(index); } /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1beta3.Evaluation evaluations = 1;</code> */ @java.lang.Override public com.google.cloud.documentai.v1beta3.EvaluationOrBuilder getEvaluationsOrBuilder( int index) { return evaluations_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < evaluations_.size(); i++) { output.writeMessage(1, evaluations_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < evaluations_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, evaluations_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.documentai.v1beta3.ListEvaluationsResponse)) { return super.equals(obj); } com.google.cloud.documentai.v1beta3.ListEvaluationsResponse other = (com.google.cloud.documentai.v1beta3.ListEvaluationsResponse) obj; if (!getEvaluationsList().equals(other.getEvaluationsList())) return false; if 
(!getNextPageToken().equals(other.getNextPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getEvaluationsCount() > 0) { hash = (37 * hash) + EVALUATIONS_FIELD_NUMBER; hash = (53 * hash) + getEvaluationsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.documentai.v1beta3.ListEvaluationsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.documentai.v1beta3.ListEvaluationsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.documentai.v1beta3.ListEvaluationsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.documentai.v1beta3.ListEvaluationsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.documentai.v1beta3.ListEvaluationsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.documentai.v1beta3.ListEvaluationsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 
throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.documentai.v1beta3.ListEvaluationsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.documentai.v1beta3.ListEvaluationsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.documentai.v1beta3.ListEvaluationsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.documentai.v1beta3.ListEvaluationsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.documentai.v1beta3.ListEvaluationsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.documentai.v1beta3.ListEvaluationsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( 
com.google.cloud.documentai.v1beta3.ListEvaluationsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The response from `ListEvaluations`. * </pre> * * Protobuf type {@code google.cloud.documentai.v1beta3.ListEvaluationsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.documentai.v1beta3.ListEvaluationsResponse) com.google.cloud.documentai.v1beta3.ListEvaluationsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.documentai.v1beta3.DocumentAiProcessorService .internal_static_google_cloud_documentai_v1beta3_ListEvaluationsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.documentai.v1beta3.DocumentAiProcessorService .internal_static_google_cloud_documentai_v1beta3_ListEvaluationsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.documentai.v1beta3.ListEvaluationsResponse.class, com.google.cloud.documentai.v1beta3.ListEvaluationsResponse.Builder.class); } // Construct using com.google.cloud.documentai.v1beta3.ListEvaluationsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (evaluationsBuilder_ == null) { evaluations_ = java.util.Collections.emptyList(); } else { evaluations_ = null; 
evaluationsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.documentai.v1beta3.DocumentAiProcessorService .internal_static_google_cloud_documentai_v1beta3_ListEvaluationsResponse_descriptor; } @java.lang.Override public com.google.cloud.documentai.v1beta3.ListEvaluationsResponse getDefaultInstanceForType() { return com.google.cloud.documentai.v1beta3.ListEvaluationsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.documentai.v1beta3.ListEvaluationsResponse build() { com.google.cloud.documentai.v1beta3.ListEvaluationsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.documentai.v1beta3.ListEvaluationsResponse buildPartial() { com.google.cloud.documentai.v1beta3.ListEvaluationsResponse result = new com.google.cloud.documentai.v1beta3.ListEvaluationsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.documentai.v1beta3.ListEvaluationsResponse result) { if (evaluationsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { evaluations_ = java.util.Collections.unmodifiableList(evaluations_); bitField0_ = (bitField0_ & ~0x00000001); } result.evaluations_ = evaluations_; } else { result.evaluations_ = evaluationsBuilder_.build(); } } private void buildPartial0(com.google.cloud.documentai.v1beta3.ListEvaluationsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, 
java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.documentai.v1beta3.ListEvaluationsResponse) { return mergeFrom((com.google.cloud.documentai.v1beta3.ListEvaluationsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.documentai.v1beta3.ListEvaluationsResponse other) { if (other == com.google.cloud.documentai.v1beta3.ListEvaluationsResponse.getDefaultInstance()) return this; if (evaluationsBuilder_ == null) { if (!other.evaluations_.isEmpty()) { if (evaluations_.isEmpty()) { evaluations_ = other.evaluations_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureEvaluationsIsMutable(); evaluations_.addAll(other.evaluations_); } onChanged(); } } else { if (!other.evaluations_.isEmpty()) { if (evaluationsBuilder_.isEmpty()) { evaluationsBuilder_.dispose(); evaluationsBuilder_ = null; evaluations_ = other.evaluations_; bitField0_ = (bitField0_ & ~0x00000001); evaluationsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getEvaluationsFieldBuilder() : null; } else { evaluationsBuilder_.addAllMessages(other.evaluations_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.documentai.v1beta3.Evaluation m = input.readMessage( com.google.cloud.documentai.v1beta3.Evaluation.parser(), extensionRegistry); if (evaluationsBuilder_ == null) { ensureEvaluationsIsMutable(); evaluations_.add(m); } else { evaluationsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.documentai.v1beta3.Evaluation> evaluations_ = java.util.Collections.emptyList(); private void ensureEvaluationsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { evaluations_ = new java.util.ArrayList<com.google.cloud.documentai.v1beta3.Evaluation>(evaluations_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.documentai.v1beta3.Evaluation, 
com.google.cloud.documentai.v1beta3.Evaluation.Builder, com.google.cloud.documentai.v1beta3.EvaluationOrBuilder> evaluationsBuilder_; /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1beta3.Evaluation evaluations = 1;</code> */ public java.util.List<com.google.cloud.documentai.v1beta3.Evaluation> getEvaluationsList() { if (evaluationsBuilder_ == null) { return java.util.Collections.unmodifiableList(evaluations_); } else { return evaluationsBuilder_.getMessageList(); } } /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1beta3.Evaluation evaluations = 1;</code> */ public int getEvaluationsCount() { if (evaluationsBuilder_ == null) { return evaluations_.size(); } else { return evaluationsBuilder_.getCount(); } } /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1beta3.Evaluation evaluations = 1;</code> */ public com.google.cloud.documentai.v1beta3.Evaluation getEvaluations(int index) { if (evaluationsBuilder_ == null) { return evaluations_.get(index); } else { return evaluationsBuilder_.getMessage(index); } } /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1beta3.Evaluation evaluations = 1;</code> */ public Builder setEvaluations(int index, com.google.cloud.documentai.v1beta3.Evaluation value) { if (evaluationsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureEvaluationsIsMutable(); evaluations_.set(index, value); onChanged(); } else { evaluationsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The evaluations requested. 
* </pre> * * <code>repeated .google.cloud.documentai.v1beta3.Evaluation evaluations = 1;</code> */ public Builder setEvaluations( int index, com.google.cloud.documentai.v1beta3.Evaluation.Builder builderForValue) { if (evaluationsBuilder_ == null) { ensureEvaluationsIsMutable(); evaluations_.set(index, builderForValue.build()); onChanged(); } else { evaluationsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1beta3.Evaluation evaluations = 1;</code> */ public Builder addEvaluations(com.google.cloud.documentai.v1beta3.Evaluation value) { if (evaluationsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureEvaluationsIsMutable(); evaluations_.add(value); onChanged(); } else { evaluationsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1beta3.Evaluation evaluations = 1;</code> */ public Builder addEvaluations(int index, com.google.cloud.documentai.v1beta3.Evaluation value) { if (evaluationsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureEvaluationsIsMutable(); evaluations_.add(index, value); onChanged(); } else { evaluationsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1beta3.Evaluation evaluations = 1;</code> */ public Builder addEvaluations( com.google.cloud.documentai.v1beta3.Evaluation.Builder builderForValue) { if (evaluationsBuilder_ == null) { ensureEvaluationsIsMutable(); evaluations_.add(builderForValue.build()); onChanged(); } else { evaluationsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The evaluations requested. 
* </pre> * * <code>repeated .google.cloud.documentai.v1beta3.Evaluation evaluations = 1;</code> */ public Builder addEvaluations( int index, com.google.cloud.documentai.v1beta3.Evaluation.Builder builderForValue) { if (evaluationsBuilder_ == null) { ensureEvaluationsIsMutable(); evaluations_.add(index, builderForValue.build()); onChanged(); } else { evaluationsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1beta3.Evaluation evaluations = 1;</code> */ public Builder addAllEvaluations( java.lang.Iterable<? extends com.google.cloud.documentai.v1beta3.Evaluation> values) { if (evaluationsBuilder_ == null) { ensureEvaluationsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, evaluations_); onChanged(); } else { evaluationsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1beta3.Evaluation evaluations = 1;</code> */ public Builder clearEvaluations() { if (evaluationsBuilder_ == null) { evaluations_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { evaluationsBuilder_.clear(); } return this; } /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1beta3.Evaluation evaluations = 1;</code> */ public Builder removeEvaluations(int index) { if (evaluationsBuilder_ == null) { ensureEvaluationsIsMutable(); evaluations_.remove(index); onChanged(); } else { evaluationsBuilder_.remove(index); } return this; } /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1beta3.Evaluation evaluations = 1;</code> */ public com.google.cloud.documentai.v1beta3.Evaluation.Builder getEvaluationsBuilder(int index) { return getEvaluationsFieldBuilder().getBuilder(index); } /** * * * <pre> * The evaluations requested. 
* </pre> * * <code>repeated .google.cloud.documentai.v1beta3.Evaluation evaluations = 1;</code> */ public com.google.cloud.documentai.v1beta3.EvaluationOrBuilder getEvaluationsOrBuilder( int index) { if (evaluationsBuilder_ == null) { return evaluations_.get(index); } else { return evaluationsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1beta3.Evaluation evaluations = 1;</code> */ public java.util.List<? extends com.google.cloud.documentai.v1beta3.EvaluationOrBuilder> getEvaluationsOrBuilderList() { if (evaluationsBuilder_ != null) { return evaluationsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(evaluations_); } } /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1beta3.Evaluation evaluations = 1;</code> */ public com.google.cloud.documentai.v1beta3.Evaluation.Builder addEvaluationsBuilder() { return getEvaluationsFieldBuilder() .addBuilder(com.google.cloud.documentai.v1beta3.Evaluation.getDefaultInstance()); } /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1beta3.Evaluation evaluations = 1;</code> */ public com.google.cloud.documentai.v1beta3.Evaluation.Builder addEvaluationsBuilder(int index) { return getEvaluationsFieldBuilder() .addBuilder(index, com.google.cloud.documentai.v1beta3.Evaluation.getDefaultInstance()); } /** * * * <pre> * The evaluations requested. 
* </pre> * * <code>repeated .google.cloud.documentai.v1beta3.Evaluation evaluations = 1;</code> */ public java.util.List<com.google.cloud.documentai.v1beta3.Evaluation.Builder> getEvaluationsBuilderList() { return getEvaluationsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.documentai.v1beta3.Evaluation, com.google.cloud.documentai.v1beta3.Evaluation.Builder, com.google.cloud.documentai.v1beta3.EvaluationOrBuilder> getEvaluationsFieldBuilder() { if (evaluationsBuilder_ == null) { evaluationsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.documentai.v1beta3.Evaluation, com.google.cloud.documentai.v1beta3.Evaluation.Builder, com.google.cloud.documentai.v1beta3.EvaluationOrBuilder>( evaluations_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); evaluations_ = null; } return evaluationsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.documentai.v1beta3.ListEvaluationsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta3.ListEvaluationsResponse) private static final com.google.cloud.documentai.v1beta3.ListEvaluationsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.documentai.v1beta3.ListEvaluationsResponse(); } public static com.google.cloud.documentai.v1beta3.ListEvaluationsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListEvaluationsResponse> PARSER = new com.google.protobuf.AbstractParser<ListEvaluationsResponse>() { @java.lang.Override public ListEvaluationsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return 
builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListEvaluationsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListEvaluationsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.documentai.v1beta3.ListEvaluationsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
openjdk/jmc
36,940
application/org.openjdk.jmc.flightrecorder.ui/src/main/java/org/openjdk/jmc/flightrecorder/ui/messages/internal/Messages.java
/* * Copyright (c) 2018, 2025, Oracle and/or its affiliates. All rights reserved. * * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * The contents of this file are subject to the terms of either the Universal Permissive License * v 1.0 as shown at https://oss.oracle.com/licenses/upl * * or the following license: * * Redistribution and use in source and binary forms, with or without modification, are permitted * provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this list of conditions * and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, this list of * conditions and the following disclaimer in the documentation and/or other materials provided with * the distribution. * * 3. Neither the name of the copyright holder nor the names of its contributors may be used to * endorse or promote products derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY * WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package org.openjdk.jmc.flightrecorder.ui.messages.internal; import org.eclipse.osgi.util.NLS; public class Messages extends NLS { private static final String BUNDLE_NAME = "org.openjdk.jmc.flightrecorder.ui.messages.internal.messages"; //$NON-NLS-1$ public static String AgentsPage_PAGE_NAME; public static String ATTRIBUTE_CONFIG_PAGE_RULES; public static String ATTRIBUTE_CONFIG_RULE; public static String ATTRIBUTE_CONFIG_RULES; public static String ATTRIBUTE_CONFIG_RULES_DESC; public static String ATTRIBUTE_CONFIG_RULE_DESC; public static String ATTRIBUTE_CONFIG_SHARED_GROUP_NAME; public static String ATTRIBUTE_DESCRIPTION_LABEL; public static String ATTRIBUTE_ID_LABEL; public static String ATTRIBUTE_NOT_SHARED; public static String ATTR_HEAP_USED_POST_GC; public static String ATTR_HEAP_USED_POST_GC_DESC; public static String CHART_ZOOM_TO_SELECTED_RANGE; public static String CONFIGURATION_EDIT_ACTION; public static String COUNT_COLUMN_NAME; public static String ChartAndTableUI_HISTOGRAM_SELECTION; public static String ChartAndTableUI_TIMELINE_SELECTION; public static String ChartToolTipProvider_CAPTION_DESCRIPTION; public static String ChartToolTipProvider_CAPTION_NAME; public static String ClassLoadingPage_AGGR_CLASSES_LOADED; public static String ClassLoadingPage_AGGR_CLASSES_LOADED_BY_CLASSLOADER; public static String ClassLoadingPage_AGGR_CLASSES_LOADED_BY_CLASSLOADER_DESC; public static String ClassLoadingPage_AGGR_CLASSES_LOADED_DESC; public static String ClassLoadingPage_AGGR_CLASSES_UNLOADED; public static String ClassLoadingPage_AGGR_CLASSES_UNLOADED_BY_CLASSLOADER; public static String ClassLoadingPage_AGGR_CLASSES_UNLOADED_BY_CLASSLOADER_DESC; public static String ClassLoadingPage_AGGR_CLASSES_UNLOADED_DESC; public static String ClassLoadingPage_CLASS_LOADER_TAB; public static String ClassLoadingPage_CLASS_LOADER_STATISTICS_ACTION; public static String ClassLoadingPage_CLASS_LOADER_STATISTICS_LIST_SELECTION; public static String 
ClassLoadingPage_CLASS_LOADER_STATISTICS_TAB_TITLE; public static String ClassLoadingPage_CLASS_LOADING_ACTION; public static String ClassLoadingPage_CLASS_LOADING_ACTION_DESC; public static String ClassLoadingPage_CLASS_LOADING_HISTOGRAM_SELECTION; public static String ClassLoadingPage_CLASS_LOADING_LIST_SELECTION; public static String ClassLoadingPage_CLASS_LOADING_TAB_TITLE; public static String ClassLoadingPage_CLASS_LOADING_TIMELINE_SELECTION; public static String ClassLoadingPage_CLASS_UNLOADING_ACTION; public static String ClassLoadingPage_CLASS_UNLOADING_ACTION_DESC; public static String ClassLoadingPage_CLASS_UNLOADING_LIST_SELECTION; public static String ClassLoadingPage_CLASS_UNLOADING_TAB_TITLE; public static String ClassLoadingPage_CLASS_DEFINE_ACTION; public static String ClassLoadingPage_CLASS_DEFINE_LIST_SELECTION; public static String ClassLoadingPage_CLASS_DEFINE_TAB_TITLE; public static String ClassLoadingPage_PAGE_NAME; public static String ClassLoadingPage_ROW_CLASSES_LOADED; public static String ClassLoadingPage_ROW_CLASSES_UNLOADED; public static String ClassLoadingPage_ROW_CLASS_LOADING_STATISTICS; public static String CodeCachePage_OVERLAYS_SWEEPS_DESC; public static String CodeCachePage_OVERLAY_SWEEPS; public static String CodeCachePage_PAGE_NAME; public static String CodeCachePage_ROW_ADAPTORS; public static String CodeCachePage_ROW_ENTRIES; public static String CodeCachePage_ROW_METHODS; public static String CodeCachePage_ROW_UNALLOCATED; public static String CodeCachePage_SWEEPS_TIMELINE_SELECTION; public static String CodeCachePage_SWEEP_TABLE_SELECTION; public static String CompilationsPage_COMPILATIONS_DURATION_SELECTION; public static String CompilationsPage_COMPILATIONS_FAILED_TABLE_SELECTION; public static String CompilationsPage_COMPILATIONS_TABLE_SELECTION; public static String CompilationsPage_PAGE_NAME; public static String CompilationsPage_ROW_DURATIONS; public static String CompilationsPage_TAB_COMPILATIONS; public static 
String CompilationsPage_TAB_COMPILATIONS_FAILED; public static String ConstantPoolsPage_PAGE_NAME; public static String ConstantPoolsPage_SIZE_TOTAL_PERCENTAGE; public static String ConstantPoolsPage_SIZE_TOTAL_PERCENTAGE_DESC; public static String DropdownLaneFilter_QUICK_FILTER; public static String DropdownLaneFilter_THREAD_STATE_SELECTION; public static String DUMP_RECORDING_DEFAULT_TIMESPAN_TO_DUMP; public static String DUMP_RECORDING_NO_DEFAULT; public static String DUMP_RECORDING_TIMESPAN; public static String DUMP_RECORDING_TIMESPAN_LESS_THAN_ZERO; public static String DUMP_RECORDING_TIMESPAN_UNPARSABLE; public static String DUMP_RECORDING_TIMESPAN_VALUE; public static String DUMP_RECORDING_WHOLE; public static String DurationPercentileTable_PERCENTILE_COL_NAME; public static String EVENT_TYPE_FOLDER_NODE_EVENTS_BY_TYPE; public static String EVENT_TYPE_FOLDER_NODE_UNCATEGORIZED; public static String EVENT_TYPE_TREE_NODE_TOOLTIP; public static String EVENT_TYPE_TREE_TITLE; public static String EnvironmentVariablesPage_PAGE_NAME; public static String EventBrowserPage_DISPLAY_TYPES_WITHOUT_EVENTS; public static String EventBrowserPage_EVENT_BROWSER_SELECTION; public static String EventBrowserPage_EVENT_TYPE_TREE_SELECTION; public static String EventBrowserPage_NEW_PAGE_USING_TYPES_ACTION; public static String EventBrowserPage_PAGE_DESC; public static String EventBrowserPage_PAGE_NAME; public static String ExceptionsPage_CLASS_AND_MESSAGE; public static String ExceptionsPage_GROUP_BY_CLASS_ACTION; public static String ExceptionsPage_GROUP_BY_CLASS_AND_MESSAGE_ACTION; public static String ExceptionsPage_GROUP_BY_MESSAGE_ACTION; public static String ExceptionsPage_PAGE_NAME; public static String ExceptionsPage_ROW_ERRORS; public static String ExceptionsPage_ROW_EXCEPTIONS; public static String ExceptionsPage_ROW_STATISTICS; public static String ExceptionsPage_THROWABLES_HISTOGRAM_SELECTION; public static String ExceptionsPage_THROWABLES_LOG_SELECTION; public 
static String ExceptionsPage_THROWABLES_TIMELINE_SELECTION; public static String FILE_OPENER_COULD_NOT_LOAD_FILE; public static String FILE_OPENER_JROCKIT_TEXT; public static String FILE_OPENER_JROCKIT_TITLE; public static String FILE_OPENER_LOAD_JOB_TITLE; public static String FILE_OPENER_VERSION_NOT_SUPPORTED; public static String FILE_OPENER_WARNING_NO_EVENTS; public static String FILE_OPENER_WARNING_SHORT_TIME; public static String FILE_OPENER_WARNING_TITLE; public static String FILE_OPENER_ZIPPED_FILE_TEXT; public static String FILE_OPENER_ZIPPED_FILE_TITLE; public static String FILTER_ADD_FROM_ATTRIBUTE; public static String FILTER_ADD_FROM_SELECTION; public static String FILTER_NO_ATTRIBUTE_AVAILABLE; public static String FILTER_NO_SELECTION_AVAILABLE; public static String FILTER_SHOW_FILTER_ACTION; public static String FILTER_SHOW_SEARCH_ACTION; public static String FLAVOR_CONTAINS; public static String FLAVOR_FILTER_AND_FILTER; public static String FLAVOR_IS; public static String FLAVOR_IS_IN_INTERVAL; public static String FLAVOR_IS_IN_SET; public static String FLAVOR_SELECTED_EVENTS; public static String FLAVOR_SELECTED_RANGE; public static String FOLDER_COULD_NOT_BE_CREATED; public static String FileIOPage_DURATION_SELECTION; public static String FileIOPage_SIZE_SELECTION; public static String FileIOPage_HISTOGRAM_SELECTION; public static String FileIOPage_LOG_SELECTION; public static String FileIOPage_PAGE_NAME; public static String FileIOPage_PERCENTILE_SELECTION; public static String FileIOPage_ROW_FILE_READ; public static String FileIOPage_ROW_FILE_WRITE; public static String FileIOPage_ROW_FILE_FORCE; public static String FileIOPage_SELECTED_PATH; public static String FileIOPage_SELECTED_PATHS; public static String FileIOPage_TIMELINE_SELECTION; public static String FilterEditor_ACTION_CLEAR_ALL; public static String FilterEditor_ACTION_COMBINE_AND; public static String FilterEditor_ACTION_COMBINE_OR; public static String FilterEditor_ACTION_NEGATE; 
public static String FilterEditor_ACTION_REMOVE; public static String FilterEditor_ACTION_SHOW_COLUMN_HEADERS; public static String FilterEditor_COLUMN_ATTRIBUTE; public static String FilterEditor_COLUMN_OPERATION; public static String FilterEditor_COLUMN_VALUE; public static String FilterEditor_INVALID_REGEX; public static String FilterEditor_KIND_CONTAINS; public static String FilterEditor_KIND_DOESNT_EXIST; public static String FilterEditor_KIND_EXISTS; public static String FilterEditor_KIND_HAS_CENTER_IN; public static String FilterEditor_KIND_INTERSECTS; public static String FilterEditor_KIND_IS; public static String FilterEditor_KIND_ISNT_NULL; public static String FilterEditor_KIND_IS_CONTAINED_IN; public static String FilterEditor_KIND_IS_NULL; public static String FilterEditor_KIND_MATCHES; public static String FilterEditor_KIND_NOT_CONTAINS; public static String FilterEditor_KIND_NOT_HAS_CENTER_IN; public static String FilterEditor_KIND_NOT_INTERSECTS; public static String FilterEditor_KIND_NOT_IS_CONTAINED_IN; public static String FilterEditor_KIND_NOT_MATCHES; public static String FilterEditor_KIND_UNKNOWN; public static String FilterEditor_LABEL_EMPTY; public static String FilterEditor_LABEL_NAME_AND; public static String FilterEditor_LABEL_NAME_NOT_AND; public static String FilterEditor_LABEL_NAME_NOT_OR; public static String FilterEditor_LABEL_NAME_OR; public static String FilterEditor_LABEL_NAME_TYPE; public static String FilterEditor_LABEL_NAME_UNKNOWN_FILTER; public static String FilterEditor_LABEL_VALUE_UNKNOWN; public static String FilterEditor_TOOLTIP_EMPTY; public static String FlavorSelector_BUTTON_CONTAINED; public static String FlavorSelector_BUTTON_CONTAINED_TOOLTIP; public static String FlavorSelector_BUTTON_SAME_THREADS; public static String FlavorSelector_BUTTON_SAME_THREADS_TOOLTIP; public static String FlavorSelector_BUTTON_SHOW_CONCURRENT; public static String FlavorSelector_BUTTON_SHOW_CONCURRENT_TOOLTIP; public static String 
FlavorSelector_BUTTON_TIMERANGE_CLEAR; public static String FlavorSelector_BUTTON_TIMERANGE_CLEAR_TOOLTIP; public static String FlavorSelector_BUTTON_TIMERANGE_SET; public static String FlavorSelector_BUTTON_TIMERANGE_SET_TOOLTIP; public static String FlavorSelector_LABEL_ASPECT; public static String FlavorSelector_LABEL_SELECTION; public static String FlavorSelector_LABEL_NO_SELECTION; public static String FlavorSelector_LABEL_TIMERANGE; public static String GCConfigurationPage_PAGE_NAME; public static String GCConfigurationPage_SECTION_GC_CONFIG; public static String GCConfigurationPage_SECTION_HEAP_CONFIG; public static String GCConfigurationPage_SECTION_YOUNG_CONFIG; public static String GCConfigurationPage_SECTION_JVM_GC_FLAGS; public static String GCConfigurationPage_COLUMN_VALUE; public static String GCSummaryPage_PAGE_NAME; public static String GCSummaryPage_SECTION_YOUNG_COLLECTION; public static String GCSummaryPage_SECTION_OLD_COLLECTION; public static String GCSummaryPage_SECTION_ALL_COLLECTION; public static String GCSummaryPage_SECTION_ALL_COLLECTION_PAUSE; public static String GarbageCollectionsPage_COMMITTED_HEAP_DELTA; public static String GarbageCollectionsPage_COMMITTED_METASPACE_DELTA; public static String GarbageCollectionsPage_DISABLED_TOOLTIP; public static String GarbageCollectionsPage_LIST_SELECTION; public static String GarbageCollectionsPage_METASPACE_SELECTION; public static String GarbageCollectionsPage_METASPACE_TITLE; public static String GarbageCollectionsPage_PAGE_NAME; public static String GarbageCollectionsPage_PAUSE_PHASES_TITLE; public static String GarbageCollectionsPage_PAUSE_PHASE_SELECTION; public static String GarbageCollectionsPage_ROW_HEAP; public static String GarbageCollectionsPage_ROW_HEAP_DESC; public static String GarbageCollectionsPage_ROW_HEAP_POST_GC; public static String GarbageCollectionsPage_ROW_HEAP_POST_GC_DESC; public static String GarbageCollectionsPage_ROW_METASPACE; public static String 
GarbageCollectionsPage_ROW_METASPACE_DESC; public static String GarbageCollectionsPage_ROW_PAUSE_PHASES; public static String GarbageCollectionsPage_ROW_PAUSE_PHASES_DESC; public static String GarbageCollectionsPage_TIMELINE_SELECTION; public static String GarbageCollectionsPage_USED_HEAP_AFTER_GC; public static String GarbageCollectionsPage_USED_HEAP_BEFORE_GC; public static String GarbageCollectionsPage_USED_HEAP_DELTA; public static String GarbageCollectionsPage_USED_METASPACE_DELTA; public static String HDRHistogramView_NO_VALID_SELECTION_TEXT; public static String HDRHistogramView_DURATION_COLUMN_NAME; public static String HDRHistogramView_EVENT_COUNT_COLUMN_NAME; public static String HDRHistogramView_PERCENTILE_SELECTION; public static String HDRHistogramView_DURATION_SELECTION; public static String HDRHistogramView_DURATIONS_CHART_TITLE; public static String HDRHistogramView_DURATIONS_CHART_DESCRIPTION; public static String HeapPage_LIVE_SIZE_OF_CLASS; public static String HeapPage_OVERLAY_GC; public static String HeapPage_OVERLAY_GC_DESC; public static String HeapPage_PAGE_NAME; public static String HeapPage_ROW_ALLOCATION; public static String HeapPage_ROW_LIVE_SIZE; public static String HeapPage_ROW_LIVE_SIZE_DESC; public static String HeapPage_ROW_MEMORY_USAGE; public static String HeapPage_ROW_MEMORY_USAGE_DESC; public static String HeapPage_SELECTED_CLASS; public static String HeapPage_SELECTED_CLASSES; public static String HeapPage_ALLOCATION_TOTAL_PERCENTAGE; public static String HeapPage_ALLOCATION_TOTAL_PERCENTAGE_DESC; public static String HeapPage_SIZE_TOTAL_PERCENTAGE; public static String HeapPage_SIZE_TOTAL_PERCENTAGE_DESC; public static String INFORMATION_COMPONENT_NOT_AVAILABLE; public static String IO_PAGE_DURATIONS_DESCRIPTION; public static String IO_PAGE_EVENT_LOG_DESCRIPTION; public static String IO_PAGE_TIMELINE_DESCRIPTION; public static String IO_PAGE_SIZE_DESCRIPTION; public static String ITEMHANDLER_CHART_DESCRIPTION; public static 
String ITEMHANDLER_CHART_TITLE; public static String ITEMHANDLER_LIST_DESCRIPTION; public static String ITEMHANDLER_LIST_TITLE; public static String ITEMHANDLER_LOG_SELECTION; public static String ITEM_CHART_COLOR_SPAN_BY; public static String ITEM_CHART_SHOW_GROUPING_AS_SPAN_CHART; public static String ITEM_CHART_SHOW_IN_BAR_CHART; public static String ITEM_CHART_SHOW_IN_LINE_CHART; public static String ITEM_CHART_TIMELINE_SELECTION; public static String ITEM_LIST_ELLIPSIS_TEXT; public static String ITEM_LIST_SIZE_LESS_THAN_ZERO; public static String ITEM_LIST_SIZE_PREF; public static String ITEM_LIST_SIZE_PREF_TOOLTIP; public static String ITEM_LIST_SIZE_UNPARSABLE; public static String ItemHandlerPage_DEFAULT_PAGE_NAME; public static String ItemHandlerPage_PAGE_EVENTS_COUNT_TOOLTIP; public static String ItemHandlerPage_SET_TOPICS_ACTION; public static String ItemHandlerPage_SET_TOPICS_DIALOG_MESSAGE; public static String ItemHandlerPage_SET_TOPICS_TITLE; public static String ItemHandlerPage_SHOW_FILTER_ACTION; public static String JFR_EDITOR_INVALID_RECORDING_TEXT; public static String JFR_EDITOR_NO_PAGES_TO_SHOW; public static String JFR_EDITOR_PAGE_CANNOT_BE_DISPLAYED; public static String JFR_EDITOR_RULES_CANCELLED; public static String JFR_EDITOR_RULES_EVALUATING; public static String JFR_EDITOR_RULES_IGNORED; public static String JFR_EDITOR_RULES_IGNORED_REASON; public static String JFR_EDITOR_RULES_SCHEDULED; public static String JFR_EDITOR_RULES_TASK_NAME; public static String JFR_EDITOR_RULES_WAITING; public static String JFR_EDITOR_RULE_EVALUATION_ERROR_DESCRIPTION; public static String JFR_OUTLINE_CREATE_CUSTOM_MESSAGE; public static String JFR_OUTLINE_CREATE_CUSTOM_TITLE; public static String JFR_OUTLINE_CUSTOM_PAGE; public static String JFR_OUTLINE_DELETE_CONFIRM_MESSAGE; public static String JFR_OUTLINE_DELETE_CONFIRM_TITLE; public static String JFR_OUTLINE_LOCK_PAGES_ACTION; public static String JFR_OUTLINE_MOVE_DOWN; public static String 
JFR_OUTLINE_MOVE_LEFT; public static String JFR_OUTLINE_MOVE_RIGHT; public static String JFR_OUTLINE_MOVE_UP; public static String JFR_OUTLINE_NEW_PAGE; public static String JFR_OUTLINE_RESET_ACTION; public static String JFR_OUTLINE_RESET_ALL_ACTION; public static String JFR_OUTLINE_RESET_ALL_CONFIRM_MESSAGE; public static String JFR_OUTLINE_RESET_ALL_CONFIRM_TITLE; public static String JFR_OUTLINE_RESET_CONFIRM_MESSAGE; public static String JFR_OUTLINE_RESET_CONFIRM_TITLE; public static String JFR_PROPERTIES_ARRAY_WITH_OMITTED_ELEMENT; public static String JFR_PROPERTIES_ARRAY_WITH_OMITTED_ELEMENTS; public static String JFR_PROPERTIES_CALCULATING; public static String JFR_PROPERTIES_INSERTED_ELLIPSIS; public static String JFR_PROPERTIES_PROPERTY_SELECTION; public static String JFR_PROPERTIES_TOO_MANY_VALUES; public static String JFR_PROPERTY_SHEET_EVENTS; public static String JFR_PROPERTY_SHEET_FIELD; public static String JFR_PROPERTY_SHEET_VALUE; public static String JFR_PROPERTY_SHEET_VERBOSE_VALUE; public static String JVMInformationPage_COLUMN_NEW_VALUE; public static String JVMInformationPage_COLUMN_OLD_VALUE; public static String JVMInformationPage_COLUMN_VALUE; public static String JVMInformationPage_EMPTY_TABLE; public static String JVMInformationPage_PAGE_NAME; public static String JVMInformationPage_SECTION_JVM_FLAGS; public static String JVMInformationPage_SECTION_JVM_FLAGS_LOG; public static String JVMInformationPage_SECTION_JVM_INFO; public static String JavaApplicationPage_COLUMN_THREAD_DURATION; public static String JavaApplicationPage_COLUMN_THREAD_DURATION_DESC; public static String JavaApplicationPage_COLUMN_THREAD_END; public static String JavaApplicationPage_COLUMN_THREAD_END_DESC; public static String JavaApplicationPage_COLUMN_THREAD_PERCENTAGE; public static String JavaApplicationPage_COLUMN_THREAD_PERCENTAGE_DESC; public static String JavaApplicationPage_COLUMN_THREAD_START; public static String JavaApplicationPage_COLUMN_THREAD_START_DESC; 
public static String JavaApplicationPage_EDIT_THREAD_LANES_ACTION; public static String JavaApplicationPage_EDIT_THREAD_LANES_DIALOG_MESSAGE; public static String JavaApplicationPage_EDIT_THREAD_LANES_DIALOG_TITLE; public static String JavaApplicationPage_HALTS_ACTION; public static String JavaApplicationPage_HALTS_ACTION_DESC; public static String JavaApplicationPage_METHOD_PROFILING; public static String JavaApplicationPage_METHOD_PROFILING_DESC; public static String JavaApplicationPage_PAGE_NAME; public static String JavaApplicationPage_ROW_ALLOCATION; public static String JavaApplicationPage_ROW_CPU_USAGE; public static String JavaApplicationPage_ROW_CPU_USAGE_DESC; public static String JavaApplicationPage_ROW_HEAP_USAGE; public static String JavaApplicationPage_ROW_RSS; public static String JavaApplicationPage_ROW_RSS_DESC; public static String JavaApplicationPage_ROW_THREAD_COUNTS; public static String JavaApplicationPage_ROW_THREAD_COUNTS_DESC; public static String JavaApplicationPage_SELECTED_THREAD; public static String JavaApplicationPage_SELECTED_THREADS; public static String JavaApplicationPage_THREAD_ACTIVITY_ACTION; public static String JavaApplicationPage_THREAD_ACTIVITY_ACTION_DESC; public static String JavaApplicationPage_THREAD_LANE_JAVA_LATENCIES; public static String JavaApplicationPage_THREAD_LIFESPAN; public static String LANES_ADD_LANE_ACTION; public static String LANES_CHECK_TO_INCLUDE; public static String LANES_DEFINITION_DESC; public static String LANES_DEFINITION_NAME; public static String LANES_DELETE_NOT_ALLOWED_WARNING; public static String LANES_DELETE_NOT_ALLOWED_WARNING_DESC; public static String LANES_EDITOR_LABEL; public static String LANES_EDIT_NOT_ALLOWED_WARNING; public static String LANES_EDIT_NOT_ALLOWED_WARNING_DESC; public static String LANES_EMPTY_LANE; public static String LANES_FILTER_LABEL; public static String LANES_LANE_COLUMN; public static String LANES_MOVE_DOWN_ACTION; public static String LANES_MOVE_UP_ACTION; 
public static String LANES_OTHER_TYPES; public static String LockInstancesPage_ADDRESS_HISTOGRAM_SELECTION; public static String LockInstancesPage_AGGR_BY_ADDRESS; public static String LockInstancesPage_AGGR_BY_ADDRESS_DESC; public static String LockInstancesPage_AGGR_BY_THREAD; public static String LockInstancesPage_AGGR_BY_THREAD_DESC; public static String LockInstancesPage_CLASS_HISTOGRAM_SELECTION; public static String LockInstancesPage_PAGE_NAME; public static String LockInstancesPage_THREAD_HISTOGRAM_SELECTION; public static String MemoryLeakPage_ADDRESS_COLUMN_HEADER; public static String MemoryLeakPage_COUNT_COLUMN_HEADER; public static String MemoryLeakPage_DESCRIPTION_COLUMN_HEADER; public static String MemoryLeakPage_OBJECT_FORMATTING_OPTIONS; public static String MemoryLeakPage_OBJECT_FORMAT_ARRAY; public static String MemoryLeakPage_OBJECT_FORMAT_FIELD; public static String MemoryLeakPage_OBJECT_FORMAT_OTHER_MOD; public static String MemoryLeakPage_OBJECT_FORMAT_PACKAGE; public static String MemoryLeakPage_OBJECT_FORMAT_STATIC_MOD; public static String MemoryLeakPage_OBJECT_SAMPLES_SELECTION; public static String MemoryLeakPage_OBJECT_SAMPLE_COLUMN_HEADER; public static String MemoryLeakPage_PAGE_NAME; public static String MemoryLeakPage_RELEVANCE_COLUMN_HEADER; public static String MemoryLeakPage_STEPS_SKIPPED; public static String MethodProfilingPage_CLASS_HISTOGRAM_SELECTION; public static String MethodProfilingPage_METHOD_CONTENT_TYPE_DESCRIPTION; public static String MethodProfilingPage_METHOD_DESCRIPTION; public static String MethodProfilingPage_METHOD_TITLE; public static String MethodProfilingPage_PACKAGE_HISTOGRAM_SELECTION; public static String MethodProfilingPage_PAGE_NAME; public static String MethodProfilingPage_PREDECESSORS_DESCRIPTION; public static String MethodProfilingPage_SUCCESSORS_DESCRIPTION; public static String NativeLibraryPage_PAGE_NAME; public static String NAVIGATOR_MOVE_BACKWARD_TEXT; public static String 
NAVIGATOR_MOVE_FORWARD_TEXT; public static String NAVIGATOR_SELECT_ALL_TEXT; public static String NAVIGATOR_ZOOM_IN_TEXT; public static String NAVIGATOR_ZOOM_OUT_TEXT; public static String N_A; public static String PAGES_DURATIONS; public static String PAGES_EVENT_LOG; public static String PAGES_TIMELINE; public static String PAGES_SIZE; public static String PAGES_SUCCESSORS; public static String PAGES_PREDECESSORS; public static String PAGE_CHANGE_ICON_CHOOSE_IMAGE_FILE; public static String PAGE_CHANGE_ICON_CURRENT_ICON; public static String PAGE_CHANGE_ICON_MENU_ACTION; public static String PAGE_CHANGE_ICON_NEW_ICON_PREVIEW; public static String PAGE_CHANGE_ICON_WIZARD_PAGE_DESC; public static String PAGE_CHANGE_ICON_WIZARD_PAGE_TITLE; public static String PAGE_CHANGE_ICON_WIZARD_TITLE; public static String PAGE_EXPORT_ERROR_MESSAGE; public static String PAGE_EXPORT_ERROR_TITLE; public static String PAGE_EXPORT_WIZARD_TITLE; public static String PAGE_IMPORT_ERROR_MESSAGE; public static String PAGE_IMPORT_ERROR_TITLE; public static String PAGE_IMPORT_WIZARD_TITLE; public static String PAGE_MANAGER_FACTORY_NOT_INSTALLED; public static String PAGE_MANAGER_MISSING_IMPLEMENTATION; public static String PAGE_RENAME_DIALOG_MESSAGE; public static String PAGE_RENAME_DIALOG_TITLE; public static String PAGE_RENAME_MENU_ACTION; public static String PAGE_UNNAMED; public static String PREFERENCES_ALLOW_INCOMPLETE_RECORDING_FILE; public static String PREFERENCES_CONFIRM_REMOVE_TEMPLATE_TEXT; public static String PREFERENCES_ENABLED_RULES; public static String PREFERENCES_ENABLE_RECORDING_ANALYSIS; public static String PREFERENCES_EVALUATION_THREAD_NUMBER_LESS_THAN_ONE; public static String PREFERENCES_EVALUATION_THREAD_NUMBER_TEXT; public static String PREFERENCES_EVALUATION_THREAD_NUMBER_TOOLTIP; public static String PREFERENCES_EVALUATION_THREAD_NUMBER_UNPARSEABLE; public static String PREFERENCES_GENERAL_SETTINGS_TEXT; public static String 
PREFERENCES_INCLUDE_EXPERIMENTAL_EVENTS_AND_FIELDS; public static String PREFERENCES_PROPERTIES_ARRAY_STRING_SIZE_TEXT; public static String PREFERENCES_PROPERTIES_ARRAY_STRING_SIZE_TOOLTIP; public static String PREFERENCES_REMOVE_FINISHED_RECORDING_TEXT; public static String PREFERENCES_RULES_CONFIGURE_SELECTED; public static String PREFERENCES_SHOW_MONITORING_WARNING_TEXT; public static String PREFERENCES_STACKTRACE_SHOW_HIDDEN_FRAMES; public static String PREFERENCES_WEBSOCKET_SERVER_PORT_INVALID; public static String PREFERENCES_WEBSOCKET_SERVER_PORT_TEXT; public static String PREFERENCES_WEBSOCKET_SERVER_PORT_TOOLTIP; public static String ProcessesPage_AGGR_CONCURRENT_PROCESSES; public static String ProcessesPage_AGGR_CONCURRENT_PROCESSES_DESC; public static String ProcessesPage_AGGR_FIRST_SAMPLE; public static String ProcessesPage_AGGR_FIRST_SAMPLE_DESC; public static String ProcessesPage_AGGR_LAST_SAMPLE; public static String ProcessesPage_AGGR_LAST_SAMPLE_DESC; public static String ProcessesPage_PAGE_NAME; public static String ProcessesPage_ROW_CONCURRENT_PROCESSES; public static String ProcessesPage_ROW_CPU_USAGE; public static String ProcessesPage_ROW_CPU_USAGE_DESC; public static String RESULT_VIEW_ANALYSIS_DISABLED; public static String RESULT_VIEW_NO_EDITOR_SELECTED; public static String RULESPAGE_SHOW_OK_RESULTS_ACTION; public static String RULESPAGE_SHOW_IGNORE_RESULTS_ACTION; public static String RULES_SHOW_RESULTS_ACTION; public static String RULES_STATISTICS; public static String RecordingPage_CONCURRENT_RECORDINGS_SELECTION; public static String RecordingPage_EVENT_SETTINGS_SELECTION; public static String RecordingPage_PAGE_NAME; public static String RecordingPage_RECORDING_EVENTS_END; public static String RecordingPage_RECORDING_EVENTS_START; public static String RecordingPage_RECORDING_EVENT_DURATION; public static String RecordingPage_RECORDING_EVENT_DURATION_DESC; public static String RecordingPage_RECORDING_EVENT_END_DESC; public static 
String RecordingPage_RECORDING_EVENT_START_DESC; public static String RecordingPage_SECTION_CONCURRENT_RECORDINGS; public static String RecordingPage_SECTION_EVENT_SETTINGS; public static String RecordingPage_SECTION_RECORDING_INFORMATION; public static String ResultOverview_COLUMN_EXPLANATION; public static String ResultOverview_COLUMN_PAGE; public static String ResultOverview_COLUMN_RESULT_ID; public static String ResultOverview_COLUMN_RULE_NAME; public static String ResultOverview_COLUMN_SCORE; public static String ResultOverview_COLUMN_SUMMARY; public static String ResultOverview_COLUMN_SOLUTION; public static String ResultOverview_DISPLAYMODE_REPORT; public static String ResultOverview_DISPLAYMODE_TABLE; public static String ResultOverview_EXPORT_ACTION; public static String ResultOverview_EXPORT_DIALOG_MESSAGE; public static String ResultOverview_EXPORT_DIALOG_TITLE; public static String ResultOverview_PAGE_DESC; public static String ResultOverview_PAGE_NAME; public static String ResultOverview_BROWSER_ACTION; public static String ResultTableUi_SCORE_TOOLTIP; public static String RuleManager_NULL_RESULT_DESCRIPTION; public static String SAVE_AS_ERROR_MSG; public static String SAVE_AS_JFR_DESCRIPTION; public static String SAVE_AS_NO_SRC_ERROR_MSG; public static String SAVE_AS_TITLE; public static String SEARCH_TREE_TEXT; public static String SELECTION_STORE_NO_SELECTION; public static String SELECT_RANGE_WIZARD_DESCRIPTION; public static String SELECT_RANGE_WIZARD_TEXT; public static String SELECT_RANGE_WIZARD_TITLE; public static String SELECT_RANGE_WIZARD_TO_MUCH_SELECTED_WARNING; public static String STACKTRACE_VIEW_COUNT_COLUMN_NAME; public static String STACKTRACE_VIEW_PERCENTAGE_COLUMN_NAME; public static String STACKTRACE_VIEW_PERCENTAGE_BY_DURATION_COLUMN_NAME; public static String STACKTRACE_VIEW_DURATION_COLUMN_NAME; public static String STACKTRACE_VIEW_DISTINGUISH_FRAMES_BY; public static String STACKTRACE_VIEW_FRAME_GROUP_CHOOSE; public static 
String STACKTRACE_VIEW_FRAME_GROUP_NEXT; public static String STACKTRACE_VIEW_FRAME_GROUP_PREVIOUS; public static String STACKTRACE_VIEW_GROUP_FROM; public static String STACKTRACE_VIEW_GROUP_TRACES_FROM_LAST_FRAME; public static String STACKTRACE_VIEW_GROUP_TRACES_FROM_ROOT; public static String STACKTRACE_VIEW_LAST_FRAME; public static String STACKTRACE_VIEW_LAYOUT_OPTIONS; public static String STACKTRACE_VIEW_OPTIMIZATION_TYPE; public static String STACKTRACE_VIEW_REDUCE_TREE_DEPTH; public static String STACKTRACE_VIEW_SELECTION; public static String STACKTRACE_VIEW_TREE_VIEW; public static String STACKTRACE_VIEW_DURATION; public static String STACKTRACE_VIEW_STACK_TRACE; public static String STACKTRACE_VIEW_THREAD_ROOT; public static String STACKTRACE_VIEW_TRACES_IN_GROUP; public static String STACKTRACE_VIEW_TRACES_IN_GROUPS; public static String STACKTRACE_VIEW_TRACES_OF_TOTAL; public static String STACKTRACE_VIEW_TRACE_IN_GROUP; public static String STACKTRACE_VIEW_TRACE_IN_GROUPS; public static String STACKTRACE_VIEW_TRACE_OF_TOTAL; public static String STACKTRACE_VIEW_TRACE_OF_TOTAL_COUNT; public static String STACKTRACE_VIEW_TRACES_OF_TOTAL_COUNT; public static String STORED_SELECTIONS_SIZE_LESS_THAN_ZERO; public static String STORED_SELECTIONS_SIZE_PREF; public static String STORED_SELECTIONS_SIZE_UNPARSABLE; public static String STORE_AND_ACTIVATE_SELECTION_ACTION; public static String STORE_SELECTION_ACTION; public static String SocketIOPage_BY_HOST_ACTION; public static String SocketIOPage_BY_HOST_AND_PORT_ACTION; public static String SocketIOPage_BY_PORT_ACTION; public static String SocketIOPage_DURATION_SELECTION; public static String SocketIOPage_SIZE_SELECTION; public static String SocketIOPage_HISTOGRAM_SELECTION; public static String SocketIOPage_HOST_AND_PORT; public static String SocketIOPage_LOG_SELECTION; public static String SocketIOPage_PAGE_NAME; public static String SocketIOPage_PERCENTILE_SELECTION; public static String 
SocketIOPage_ROW_SOCKET_READ; public static String SocketIOPage_ROW_SOCKET_WRITE; public static String SocketIOPage_SELECTED_HOST; public static String SocketIOPage_SELECTED_HOSTS; public static String SocketIOPage_SELECTED_HOSTS_AND_PORT; public static String SocketIOPage_SELECTED_HOSTS_AND_PORTS; public static String SocketIOPage_SELECTED_HOSTS_PORTS; public static String SocketIOPage_SELECTED_HOST_AND_PORT; public static String SocketIOPage_SELECTED_HOST_AND_PORTS; public static String SocketIOPage_SELECTED_PORT; public static String SocketIOPage_SELECTED_PORTS; public static String SocketIOPage_TIMELINE_SELECTION; public static String SystemPage_PAGE_NAME; public static String SystemPage_SECTION_CPU; public static String SystemPage_SECTION_MEMORY; public static String SystemPage_SECTION_OS; public static String SystemPage_SECTION_VIRTUALIZATION; public static String SystemPropertiesPage_PAGE_NAME; public static String TABLECOMPONENT_COMBINE_GROUP_BY; public static String TABLECOMPONENT_GROUP_BY; public static String TABLECOMPONENT_HISTOGRAM_SELECTION; public static String TABLECOMPONENT_NONE; public static String ThreadDumpsPage_PAGE_NAME; public static String ThreadsPage_EDIT_LANES; public static String ThreadsPage_FOLD_CHART_TOOLTIP; public static String ThreadsPage_FOLD_TABLE_TOOLTIP; public static String ThreadsPage_HIDE_THREAD_ACTION; public static String ThreadsPage_LANE_FILTER_HEADER; public static String ThreadsPage_LANE_TOOLTIP_TITLE; public static String ThreadsPage_NAME; public static String ThreadsPage_NAME_LEGACY; public static String ThreadsPage_RESET_CHART_TO_SELECTION_ACTION; public static String ThreadsPage_SCROLLED_COMPOSITE_NAME; public static String ThreadsPage_SHOW_CHART_TOOLTIP; public static String ThreadsPage_SHOW_TABLE_TOOLTIP; public static String ThreadsPage_TABLE_POPUP_DESCRIPTION; public static String ThreadsPage_TABLE_POPUP_TITLE; public static String ThreadsPage_VIEW_THREAD_DETAILS; public static String TlabPage_PAGE_NAME; public 
static String TlabPage_THREADS_TAB_NAME; public static String TlabPage_METHODS_TAB_NAME; public static String TlabPage_CLASS_TAB_NAME; public static String TlabPage_SUMMARY_TAB_NAME; public static String TlabPage_SUMMARY_TAB_INSIDE_ALLOCATION; public static String TlabPage_SUMMARY_TAB_OUTSIDE_ALLOCATION; public static String TlabPage_ROW_TLAB_ALLOCATIONS; public static String TlabPage_ROW_TLAB_ALLOCATIONS_DESC; public static String TlabPage_SELECTED_ONE; public static String TlabPage_SELECTED_MANY; public static String TlabPage_INSIDE_TLAB_SUM_PERCENTAGE; public static String TlabPage_INSIDE_TLAB_SUM_PERCENTAGE_DESC; public static String TlabPage_OUTSIDE_TLAB_SUM_PERCENTAGE; public static String TlabPage_OUTSIDE_TLAB_SUM_PERCENTAGE_DESC; public static String VMOPERATION_PAGE_DURATIONS_DESCRIPTION; public static String VMOPERATION_PAGE_EVENT_LOG_DESCRIPTION; public static String VMOPERATION_PAGE_TIMELINE_DESCRIPTION; public static String VMOperationPage_DURATION_SELECTION; public static String VMOperationPage_LOG_SELECTION; public static String VMOperationPage_PAGE_NAME; public static String VMOperationPage_ROW_VM_OPERATIONS; public static String VMOperationPage_TIMELINE_SELECTION; public static String ThreadsPage_LANE_THREAD_ID_TOOLTIP; static { NLS.initializeMessages(BUNDLE_NAME, Messages.class); } private Messages() { } public static String stackTraceMessage(int itemCount, int totalCount, String frameFraction) { String message; if (itemCount == 1) { message = Messages.STACKTRACE_VIEW_TRACE_OF_TOTAL_COUNT; } else { message = Messages.STACKTRACE_VIEW_TRACES_OF_TOTAL_COUNT; } return NLS.bind(message, new Object[] {itemCount, frameFraction, totalCount}); } public static String siblingMessage(int itemsInSiblings, int nSiblings) { String message; if (itemsInSiblings == 1) { if (nSiblings == 1) { message = Messages.STACKTRACE_VIEW_TRACE_IN_GROUP; } else { message = Messages.STACKTRACE_VIEW_TRACE_IN_GROUPS; } } else { if (nSiblings == 1) { message = 
Messages.STACKTRACE_VIEW_TRACES_IN_GROUP; } else { message = Messages.STACKTRACE_VIEW_TRACES_IN_GROUPS; } } return NLS.bind(message, itemsInSiblings, nSiblings); } public static String stackTraceMessage( double duration, String durationUnit, double totalDuration, String totalDurationUnit, String frameFraction) { String message; if (duration == 1) { message = Messages.STACKTRACE_VIEW_TRACE_OF_TOTAL; } else { message = Messages.STACKTRACE_VIEW_TRACES_OF_TOTAL; } return NLS.bind(message, new Object[] {duration, durationUnit, frameFraction, totalDuration, totalDurationUnit}); } }
apache/druid
37,228
indexing-service/src/test/java/org/apache/druid/indexing/overlord/autoscaling/PendingTaskBasedProvisioningStrategyTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.indexing.overlord.autoscaling; import com.google.common.base.Supplier; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import org.apache.druid.common.guava.DSuppliers; import org.apache.druid.indexer.TaskLocation; import org.apache.druid.indexer.TaskStatus; import org.apache.druid.indexing.common.TestTasks; import org.apache.druid.indexing.common.task.NoopTask; import org.apache.druid.indexing.common.task.Task; import org.apache.druid.indexing.overlord.ImmutableWorkerInfo; import org.apache.druid.indexing.overlord.RemoteTaskRunner; import org.apache.druid.indexing.overlord.RemoteTaskRunnerWorkItem; import org.apache.druid.indexing.overlord.ZkWorker; import org.apache.druid.indexing.overlord.config.RemoteTaskRunnerConfig; import org.apache.druid.indexing.overlord.setup.DefaultWorkerBehaviorConfig; import org.apache.druid.indexing.overlord.setup.FillCapacityWorkerSelectStrategy; import org.apache.druid.indexing.overlord.setup.WorkerBehaviorConfig; import org.apache.druid.indexing.worker.TaskAnnouncement; import org.apache.druid.indexing.worker.Worker; import org.apache.druid.indexing.worker.config.WorkerConfig; import 
org.apache.druid.jackson.DefaultObjectMapper; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.concurrent.Execs; import org.apache.druid.java.util.emitter.EmittingLogger; import org.apache.druid.java.util.emitter.service.ServiceEmitter; import org.apache.druid.java.util.emitter.service.ServiceEventBuilder; import org.easymock.Capture; import org.easymock.EasyMock; import org.joda.time.DateTime; import org.joda.time.Period; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.atomic.AtomicReference; /** */ public class PendingTaskBasedProvisioningStrategyTest { private AutoScaler autoScaler; private Task testTask; private PendingTaskBasedWorkerProvisioningConfig config; private PendingTaskBasedWorkerProvisioningStrategy strategy; private AtomicReference<WorkerBehaviorConfig> workerConfig; private ScheduledExecutorService executorService = Execs.scheduledSingleThreaded("test service"); private static final String MIN_VERSION = "2014-01-00T00:01:00Z"; private static final String INVALID_VERSION = "0"; @Before public void setUp() { autoScaler = EasyMock.createMock(AutoScaler.class); testTask = TestTasks.immediateSuccess("task1"); config = new PendingTaskBasedWorkerProvisioningConfig() .setMaxScalingDuration(new Period(1000)) .setNumEventsToTrack(10) .setPendingTaskTimeout(new Period(0)) .setWorkerVersion(MIN_VERSION) .setMaxScalingStep(2); workerConfig = new AtomicReference<>( new DefaultWorkerBehaviorConfig( new FillCapacityWorkerSelectStrategy(null, null), autoScaler ) ); strategy = new PendingTaskBasedWorkerProvisioningStrategy( config, DSuppliers.of(workerConfig), new ProvisioningSchedulerConfig(), new Supplier<>() { @Override public 
ScheduledExecutorService get() { return executorService; } } ); } @Test public void testGetExpectedWorkerCapacityWithNoWorkerAndHintIsValid() { int capacityHint = 10; config = new PendingTaskBasedWorkerProvisioningConfig() .setMaxScalingDuration(new Period(1000)) .setNumEventsToTrack(10) .setPendingTaskTimeout(new Period(0)) .setWorkerVersion(MIN_VERSION) .setMaxScalingStep(2) .setWorkerCapacityHint(capacityHint); strategy = new PendingTaskBasedWorkerProvisioningStrategy( config, DSuppliers.of(workerConfig), new ProvisioningSchedulerConfig(), new Supplier<>() { @Override public ScheduledExecutorService get() { return executorService; } } ); int expectedWorkerCapacity = strategy.getExpectedWorkerCapacity(ImmutableList.of()); Assert.assertEquals(capacityHint, expectedWorkerCapacity); } @Test public void testGetExpectedWorkerCapacityWithNoWorkerAndHintIsNotValid() { int capacityHint = -1; config = new PendingTaskBasedWorkerProvisioningConfig() .setMaxScalingDuration(new Period(1000)) .setNumEventsToTrack(10) .setPendingTaskTimeout(new Period(0)) .setWorkerVersion(MIN_VERSION) .setMaxScalingStep(2) .setWorkerCapacityHint(capacityHint); strategy = new PendingTaskBasedWorkerProvisioningStrategy( config, DSuppliers.of(workerConfig), new ProvisioningSchedulerConfig(), new Supplier<>() { @Override public ScheduledExecutorService get() { return executorService; } } ); int expectedWorkerCapacity = strategy.getExpectedWorkerCapacity(ImmutableList.of()); Assert.assertEquals(1, expectedWorkerCapacity); } @Test public void testGetExpectedWorkerCapacityWithSingleWorker() { int workerCapacity = 3; Collection<ImmutableWorkerInfo> workerInfoCollection = ImmutableList.of( new ImmutableWorkerInfo( new Worker("http", "localhost0", "localhost0", workerCapacity, "v1", WorkerConfig.DEFAULT_CATEGORY), 0, new HashSet<>(), new HashSet<>(), DateTimes.nowUtc() ) ); int expectedWorkerCapacity = strategy.getExpectedWorkerCapacity(workerInfoCollection); Assert.assertEquals(workerCapacity, 
expectedWorkerCapacity); } @Test public void testGetExpectedWorkerCapacityWithMultipleWorker() { int workerOneCapacity = 3; int workerTwoCapacity = 6; Collection<ImmutableWorkerInfo> workerInfoCollection = ImmutableList.of( new ImmutableWorkerInfo( new Worker("http", "localhost0", "localhost0", workerOneCapacity, "v1", WorkerConfig.DEFAULT_CATEGORY), 0, new HashSet<>(), new HashSet<>(), DateTimes.nowUtc() ), new ImmutableWorkerInfo( new Worker("http", "localhost0", "localhost0", workerTwoCapacity + 3, "v1", WorkerConfig.DEFAULT_CATEGORY), 0, new HashSet<>(), new HashSet<>(), DateTimes.nowUtc() ) ); int expectedWorkerCapacity = strategy.getExpectedWorkerCapacity(workerInfoCollection); // Use capacity of the first worker in the list Assert.assertEquals(workerOneCapacity, expectedWorkerCapacity); } @Test public void testFailIfMinWorkerIsZeroAndWorkerHintNotSet() { EmittingLogger mockLogger = EasyMock.createMock(EmittingLogger.class); Capture<String> capturedArgument = Capture.newInstance(); mockLogger.error(EasyMock.capture(capturedArgument), EasyMock.anyInt()); PendingTaskBasedWorkerProvisioningConfig config = new PendingTaskBasedWorkerProvisioningConfig() .setMaxScalingDuration(new Period(1000)) .setNumEventsToTrack(10) .setPendingTaskTimeout(new Period(0)) .setWorkerVersion(MIN_VERSION) .setMaxScalingStep(2); EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0); EasyMock.replay(autoScaler, mockLogger); DefaultWorkerBehaviorConfig defaultWorkerBehaviorConfig = PendingTaskBasedWorkerProvisioningStrategy.getDefaultWorkerBehaviorConfig( DSuppliers.of(workerConfig), config, "test", mockLogger ); Assert.assertNull(defaultWorkerBehaviorConfig); Assert.assertEquals(PendingTaskBasedWorkerProvisioningStrategy.ERROR_MESSAGE_MIN_WORKER_ZERO_HINT_UNSET, capturedArgument.getValue()); } @Test public void testSuccessfulInitialMinWorkersProvision() { EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(3).times(2); 
EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(5); EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) .andReturn(new ArrayList<String>()); RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); // No pending tasks EasyMock.expect(runner.getPendingTaskPayloads()).andReturn( new ArrayList<>() ); EasyMock.expect(runner.getWorkers()).andReturn( Collections.emptyList() ); EasyMock.expect(runner.getConfig()).andReturn(new RemoteTaskRunnerConfig()); EasyMock.expect(autoScaler.provision()).andReturn( new AutoScalingData(Collections.singletonList("aNode")) ).times(3); EasyMock.replay(runner, autoScaler); Provisioner provisioner = strategy.makeProvisioner(runner); boolean provisionedSomething = provisioner.doProvision(); Assert.assertTrue(provisionedSomething); Assert.assertTrue(provisioner.getStats().toList().size() == 3); for (ScalingStats.ScalingEvent event : provisioner.getStats().toList()) { Assert.assertTrue( event.getEvent() == ScalingStats.EVENT.PROVISION ); } } @Test public void testProvisionNoCurrentlyRunningWorkerWithCapacityHintSetAndNoPendingTaskShouldProvisionMinimumAsCurrentIsBelowMinimum() { PendingTaskBasedWorkerProvisioningConfig config = new PendingTaskBasedWorkerProvisioningConfig() .setMaxScalingDuration(new Period(1000)) .setNumEventsToTrack(10) .setPendingTaskTimeout(new Period(0)) .setWorkerVersion(MIN_VERSION) .setMaxScalingStep(2) .setWorkerCapacityHint(30); strategy = new PendingTaskBasedWorkerProvisioningStrategy( config, DSuppliers.of(workerConfig), new ProvisioningSchedulerConfig(), new Supplier<>() { @Override public ScheduledExecutorService get() { return executorService; } } ); EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(3).times(2); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(5); EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) .andReturn(new ArrayList<String>()); RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); // No pending tasks 
EasyMock.expect(runner.getPendingTaskPayloads()).andReturn( new ArrayList<>() ); EasyMock.expect(runner.getWorkers()).andReturn( Collections.emptyList() ); EasyMock.expect(runner.getConfig()).andReturn(new RemoteTaskRunnerConfig()); EasyMock.expect(autoScaler.provision()).andReturn( new AutoScalingData(Collections.singletonList("aNode")) ).times(3); EasyMock.replay(runner, autoScaler); Provisioner provisioner = strategy.makeProvisioner(runner); boolean provisionedSomething = provisioner.doProvision(); Assert.assertTrue(provisionedSomething); Assert.assertTrue(provisioner.getStats().toList().size() == 3); for (ScalingStats.ScalingEvent event : provisioner.getStats().toList()) { Assert.assertTrue( event.getEvent() == ScalingStats.EVENT.PROVISION ); } } @Test public void testProvisionNoCurrentlyRunningWorkerWithCapacityHintSetAndNoPendingTaskShouldNotProvisionAsMinimumIsZero() { PendingTaskBasedWorkerProvisioningConfig config = new PendingTaskBasedWorkerProvisioningConfig() .setMaxScalingDuration(new Period(1000)) .setNumEventsToTrack(10) .setPendingTaskTimeout(new Period(0)) .setWorkerVersion(MIN_VERSION) .setMaxScalingStep(2) .setWorkerCapacityHint(30); strategy = new PendingTaskBasedWorkerProvisioningStrategy( config, DSuppliers.of(workerConfig), new ProvisioningSchedulerConfig(), new Supplier<>() { @Override public ScheduledExecutorService get() { return executorService; } } ); // minWorkerCount is 0 EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0).times(2); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(5); EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) .andReturn(new ArrayList<String>()); RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); // No pending tasks EasyMock.expect(runner.getPendingTaskPayloads()).andReturn( new ArrayList<>() ); EasyMock.expect(runner.getWorkers()).andReturn( Collections.emptyList() ); EasyMock.expect(runner.getConfig()).andReturn(new RemoteTaskRunnerConfig()); 
EasyMock.replay(runner, autoScaler); Provisioner provisioner = strategy.makeProvisioner(runner); boolean provisionedSomething = provisioner.doProvision(); Assert.assertFalse(provisionedSomething); Assert.assertEquals(0, provisioner.getStats().toList().size()); } @Test public void testSuccessfulMinWorkersProvision() { EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(3).times(2); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(5); EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) .andReturn(new ArrayList<String>()); RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); // No pending tasks EasyMock.expect(runner.getPendingTaskPayloads()).andReturn( new ArrayList<>() ); // 1 node already running, only provision 2 more. EasyMock.expect(runner.getWorkers()).andReturn( Collections.singletonList( new TestZkWorker(testTask).toImmutable() ) ); EasyMock.expect(runner.getConfig()).andReturn(new RemoteTaskRunnerConfig()); EasyMock.expect(autoScaler.provision()).andReturn( new AutoScalingData(Collections.singletonList("aNode")) ).times(2); EasyMock.replay(runner, autoScaler); Provisioner provisioner = strategy.makeProvisioner(runner); boolean provisionedSomething = provisioner.doProvision(); Assert.assertTrue(provisionedSomething); Assert.assertTrue(provisioner.getStats().toList().size() == 2); for (ScalingStats.ScalingEvent event : provisioner.getStats().toList()) { Assert.assertTrue( event.getEvent() == ScalingStats.EVENT.PROVISION ); } } @Test public void testSuccessfulMinWorkersProvisionWithOldVersionNodeRunning() { EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(3).times(2); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(5); EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) .andReturn(new ArrayList<String>()); RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); // No pending tasks EasyMock.expect(runner.getPendingTaskPayloads()).andReturn( new ArrayList<>() ); // 1 node already 
running, only provision 2 more. EasyMock.expect(runner.getWorkers()).andReturn( Arrays.asList( new TestZkWorker(testTask).toImmutable(), new TestZkWorker(testTask, "http", "h1", "n1", INVALID_VERSION).toImmutable() // Invalid version node ) ); EasyMock.expect(runner.getConfig()).andReturn(new RemoteTaskRunnerConfig()); EasyMock.expect(autoScaler.provision()).andReturn( new AutoScalingData(Collections.singletonList("aNode")) ).times(2); EasyMock.replay(runner, autoScaler); Provisioner provisioner = strategy.makeProvisioner(runner); boolean provisionedSomething = provisioner.doProvision(); Assert.assertTrue(provisionedSomething); Assert.assertTrue(provisioner.getStats().toList().size() == 2); for (ScalingStats.ScalingEvent event : provisioner.getStats().toList()) { Assert.assertTrue( event.getEvent() == ScalingStats.EVENT.PROVISION ); } } @Test public void testProvisioning() { EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(1).times(3); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(2).times(1); EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) .andReturn(new ArrayList<String>()).times(2); EasyMock.expect(autoScaler.provision()).andReturn( new AutoScalingData(Collections.singletonList("fake")) ); RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); EasyMock.expect(runner.getPendingTaskPayloads()).andReturn( Collections.singletonList( NoopTask.create() ) ).times(2); EasyMock.expect(runner.getWorkers()).andReturn( Arrays.asList( new TestZkWorker(testTask).toImmutable(), new TestZkWorker(testTask, "http", "h1", "n1", INVALID_VERSION).toImmutable() // Invalid version node ) ).times(2); EasyMock.expect(runner.getConfig()).andReturn(new RemoteTaskRunnerConfig()).times(1); EasyMock.replay(runner); EasyMock.replay(autoScaler); Provisioner provisioner = strategy.makeProvisioner(runner); boolean provisionedSomething = provisioner.doProvision(); Assert.assertTrue(provisionedSomething); 
Assert.assertTrue(provisioner.getStats().toList().size() == 1); DateTime createdTime = provisioner.getStats().toList().get(0).getTimestamp(); Assert.assertTrue( provisioner.getStats().toList().get(0).getEvent() == ScalingStats.EVENT.PROVISION ); provisionedSomething = provisioner.doProvision(); Assert.assertFalse(provisionedSomething); Assert.assertTrue( provisioner.getStats().toList().get(0).getEvent() == ScalingStats.EVENT.PROVISION ); DateTime anotherCreatedTime = provisioner.getStats().toList().get(0).getTimestamp(); Assert.assertTrue( createdTime.equals(anotherCreatedTime) ); EasyMock.verify(autoScaler); EasyMock.verify(runner); } @Test public void testProvisionWithPendingTaskAndWorkerCapacityHintSetButNonEmptyCurrentlyRunningWorkerShouldUseCapcityFromRunningWorker() { PendingTaskBasedWorkerProvisioningConfig config = new PendingTaskBasedWorkerProvisioningConfig() .setMaxScalingDuration(new Period(1000)) .setNumEventsToTrack(10) .setPendingTaskTimeout(new Period(0)) .setWorkerVersion(MIN_VERSION) .setMaxScalingStep(2) .setWorkerCapacityHint(30); strategy = new PendingTaskBasedWorkerProvisioningStrategy( config, DSuppliers.of(workerConfig), new ProvisioningSchedulerConfig(), new Supplier<>() { @Override public ScheduledExecutorService get() { return executorService; } } ); EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0).times(3); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(3).times(1); EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) .andReturn(new ArrayList<String>()).times(2); EasyMock.expect(autoScaler.provision()).andReturn( new AutoScalingData(Collections.singletonList("fake")) ).times(2); RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); // two pending tasks EasyMock.expect(runner.getPendingTaskPayloads()).andReturn( ImmutableList.of( NoopTask.create(), NoopTask.create() ) ).times(2); // Capacity for current worker is 1 EasyMock.expect(runner.getWorkers()).andReturn( Arrays.asList( new 
TestZkWorker(testTask).toImmutable(), new TestZkWorker(testTask, "http", "h1", "n1", INVALID_VERSION).toImmutable() // Invalid version node ) ).times(2); EasyMock.expect(runner.getConfig()).andReturn(new RemoteTaskRunnerConfig()).times(1); EasyMock.replay(runner); EasyMock.replay(autoScaler); Provisioner provisioner = strategy.makeProvisioner(runner); boolean provisionedSomething = provisioner.doProvision(); // Expect to use capacity from current worker (which is 1) // and since there are two pending tasks, we will need two more workers Assert.assertTrue(provisionedSomething); Assert.assertEquals(2, provisioner.getStats().toList().size()); DateTime createdTime = provisioner.getStats().toList().get(0).getTimestamp(); Assert.assertEquals(ScalingStats.EVENT.PROVISION, provisioner.getStats().toList().get(0).getEvent()); Assert.assertEquals(ScalingStats.EVENT.PROVISION, provisioner.getStats().toList().get(1).getEvent()); provisionedSomething = provisioner.doProvision(); Assert.assertFalse(provisionedSomething); Assert.assertTrue( provisioner.getStats().toList().get(0).getEvent() == ScalingStats.EVENT.PROVISION ); DateTime anotherCreatedTime = provisioner.getStats().toList().get(0).getTimestamp(); Assert.assertTrue( createdTime.equals(anotherCreatedTime) ); EasyMock.verify(autoScaler); EasyMock.verify(runner); } @Test public void testProvisionWithPendingTaskAndWorkerCapacityHintSetButEmptyCurrentlyRunningWorkerShouldUseCapcityFromHintConfig() { PendingTaskBasedWorkerProvisioningConfig config = new PendingTaskBasedWorkerProvisioningConfig() .setMaxScalingDuration(new Period(1000)) .setNumEventsToTrack(10) .setPendingTaskTimeout(new Period(0)) .setWorkerVersion(MIN_VERSION) .setMaxScalingStep(2) .setWorkerCapacityHint(30); strategy = new PendingTaskBasedWorkerProvisioningStrategy( config, DSuppliers.of(workerConfig), new ProvisioningSchedulerConfig(), new Supplier<>() { @Override public ScheduledExecutorService get() { return executorService; } } ); 
EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0).times(3); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(3).times(1); EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) .andReturn(new ArrayList<String>()).times(2); EasyMock.expect(autoScaler.provision()).andReturn( new AutoScalingData(Collections.singletonList("fake")) ).times(1); RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); // two pending tasks EasyMock.expect(runner.getPendingTaskPayloads()).andReturn( ImmutableList.of( NoopTask.create(), NoopTask.create() ) ).times(2); // No currently running worker node EasyMock.expect(runner.getWorkers()).andReturn( Collections.emptyList() ).times(2); EasyMock.expect(runner.getConfig()).andReturn(new RemoteTaskRunnerConfig()).times(1); EasyMock.replay(runner); EasyMock.replay(autoScaler); Provisioner provisioner = strategy.makeProvisioner(runner); boolean provisionedSomething = provisioner.doProvision(); // Expect to use capacity from workerCapacityHint config (which is 30) // and since there are two pending tasks, we will need one more worker Assert.assertTrue(provisionedSomething); Assert.assertEquals(1, provisioner.getStats().toList().size()); DateTime createdTime = provisioner.getStats().toList().get(0).getTimestamp(); Assert.assertEquals(ScalingStats.EVENT.PROVISION, provisioner.getStats().toList().get(0).getEvent()); provisionedSomething = provisioner.doProvision(); Assert.assertFalse(provisionedSomething); Assert.assertTrue( provisioner.getStats().toList().get(0).getEvent() == ScalingStats.EVENT.PROVISION ); DateTime anotherCreatedTime = provisioner.getStats().toList().get(0).getTimestamp(); Assert.assertTrue( createdTime.equals(anotherCreatedTime) ); EasyMock.verify(autoScaler); EasyMock.verify(runner); } @Test public void testProvisionAlert() throws Exception { ServiceEmitter emitter = EasyMock.createMock(ServiceEmitter.class); EmittingLogger.registerEmitter(emitter); 
emitter.emit(EasyMock.<ServiceEventBuilder>anyObject()); EasyMock.expectLastCall(); EasyMock.replay(emitter); EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(1).times(3); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(2).times(1); EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) .andReturn(new ArrayList<String>()).times(2); EasyMock.expect(autoScaler.terminateWithIds(EasyMock.anyObject())) .andReturn(null); EasyMock.expect(autoScaler.provision()).andReturn( new AutoScalingData(Collections.singletonList("fake")) ); EasyMock.replay(autoScaler); RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); EasyMock.expect(runner.getPendingTaskPayloads()).andReturn( Collections.singletonList( NoopTask.create() ) ).times(2); EasyMock.expect(runner.getWorkers()).andReturn( Arrays.asList( new TestZkWorker(testTask, "http", "hi", "lo", MIN_VERSION, 1).toImmutable(), new TestZkWorker(testTask, "http", "h1", "n1", INVALID_VERSION).toImmutable(), // Invalid version node new TestZkWorker(testTask, "http", "h2", "n1", INVALID_VERSION).toImmutable() // Invalid version node ) ).times(2); EasyMock.expect(runner.getConfig()).andReturn(new RemoteTaskRunnerConfig()); EasyMock.replay(runner); Provisioner provisioner = strategy.makeProvisioner(runner); boolean provisionedSomething = provisioner.doProvision(); Assert.assertTrue(provisionedSomething); Assert.assertTrue(provisioner.getStats().toList().size() == 1); DateTime createdTime = provisioner.getStats().toList().get(0).getTimestamp(); Assert.assertTrue( provisioner.getStats().toList().get(0).getEvent() == ScalingStats.EVENT.PROVISION ); Thread.sleep(2000); provisionedSomething = provisioner.doProvision(); Assert.assertFalse(provisionedSomething); Assert.assertTrue( provisioner.getStats().toList().get(0).getEvent() == ScalingStats.EVENT.PROVISION ); DateTime anotherCreatedTime = provisioner.getStats().toList().get(0).getTimestamp(); Assert.assertTrue( createdTime.equals(anotherCreatedTime) 
); EasyMock.verify(autoScaler); EasyMock.verify(emitter); EasyMock.verify(runner); } @Test public void testDoSuccessfulTerminate() { EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(1).times(2); EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) .andReturn(new ArrayList<String>()); EasyMock.expect(autoScaler.terminate(EasyMock.anyObject())).andReturn( new AutoScalingData(new ArrayList<>()) ); EasyMock.replay(autoScaler); RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); EasyMock.expect(runner.getPendingTasks()).andReturn( Collections.singletonList( new RemoteTaskRunnerWorkItem( testTask.getId(), testTask.getType(), null, TaskLocation.unknown(), testTask.getDataSource() ).withQueueInsertionTime(DateTimes.nowUtc()) ) ).times(2); EasyMock.expect(runner.getWorkers()).andReturn( ImmutableList.of( new TestZkWorker(testTask).toImmutable(), new TestZkWorker(testTask).toImmutable() ) ).times(2); EasyMock.expect(runner.markWorkersLazy(EasyMock.anyObject(), EasyMock.anyInt())) .andReturn(Collections.singletonList(new TestZkWorker(testTask).getWorker())); EasyMock.expect(runner.getLazyWorkers()).andReturn(new ArrayList<>()); EasyMock.replay(runner); Provisioner provisioner = strategy.makeProvisioner(runner); boolean terminatedSomething = provisioner.doTerminate(); Assert.assertTrue(terminatedSomething); Assert.assertTrue(provisioner.getStats().toList().size() == 1); Assert.assertTrue( provisioner.getStats().toList().get(0).getEvent() == ScalingStats.EVENT.TERMINATE ); EasyMock.verify(autoScaler); } @Test public void testSomethingTerminating() { EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(1).times(3); EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) .andReturn(Collections.singletonList("ip")).times(2); EasyMock.expect(autoScaler.terminate(EasyMock.anyObject())).andReturn( new AutoScalingData(Collections.singletonList("ip")) ); EasyMock.replay(autoScaler); RemoteTaskRunner runner = 
EasyMock.createMock(RemoteTaskRunner.class); EasyMock.expect(runner.getWorkers()).andReturn( ImmutableList.of( new TestZkWorker(testTask).toImmutable(), new TestZkWorker(testTask).toImmutable(), new TestZkWorker(testTask).toImmutable() ) ).times(2); EasyMock.expect(runner.getLazyWorkers()).andReturn(new ArrayList<>()).times(2); EasyMock.expect(runner.markWorkersLazy(EasyMock.anyObject(), EasyMock.anyInt())) .andReturn(Collections.singletonList(new TestZkWorker(testTask).toImmutable().getWorker())); EasyMock.replay(runner); Provisioner provisioner = strategy.makeProvisioner(runner); boolean terminatedSomething = provisioner.doTerminate(); Assert.assertTrue(terminatedSomething); Assert.assertTrue(provisioner.getStats().toList().size() == 1); Assert.assertTrue( provisioner.getStats().toList().get(0).getEvent() == ScalingStats.EVENT.TERMINATE ); terminatedSomething = provisioner.doTerminate(); Assert.assertFalse(terminatedSomething); Assert.assertTrue(provisioner.getStats().toList().size() == 1); Assert.assertTrue( provisioner.getStats().toList().get(0).getEvent() == ScalingStats.EVENT.TERMINATE ); EasyMock.verify(autoScaler); EasyMock.verify(runner); } @Test public void testNoActionNeeded() { EasyMock.reset(autoScaler); EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(1).times(2); EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) .andReturn(Collections.singletonList("ip")); EasyMock.replay(autoScaler); RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); EasyMock.expect(runner.getPendingTaskPayloads()).andReturn( Collections.singletonList( (Task) NoopTask.create() ) ).times(1); EasyMock.expect(runner.getWorkers()).andReturn( Arrays.asList( new TestZkWorker(NoopTask.create()).toImmutable(), new TestZkWorker(NoopTask.create()).toImmutable() ) ).times(2); EasyMock.expect(runner.getConfig()).andReturn(new RemoteTaskRunnerConfig()); EasyMock.expect(runner.getLazyWorkers()).andReturn(new ArrayList<>()); 
EasyMock.expect(runner.markWorkersLazy(EasyMock.anyObject(), EasyMock.anyInt())) .andReturn(Collections.emptyList()); EasyMock.replay(runner); Provisioner provisioner = strategy.makeProvisioner(runner); boolean terminatedSomething = provisioner.doTerminate(); Assert.assertFalse(terminatedSomething); EasyMock.verify(autoScaler); EasyMock.reset(autoScaler); EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(1).times(2); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(2); EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) .andReturn(Collections.singletonList("ip")); EasyMock.replay(autoScaler); boolean provisionedSomething = provisioner.doProvision(); Assert.assertFalse(provisionedSomething); EasyMock.verify(autoScaler); EasyMock.verify(runner); } @Test public void testMinCountIncrease() { // Don't terminate anything EasyMock.reset(autoScaler); EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(1).times(2); EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) .andReturn(Collections.singletonList("ip")); EasyMock.replay(autoScaler); RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); EasyMock.expect(runner.getPendingTaskPayloads()).andReturn( Collections.emptyList() ).times(2); EasyMock.expect(runner.getWorkers()).andReturn( Collections.singletonList( new TestZkWorker(NoopTask.create(), "http", "h1", "i1", MIN_VERSION).toImmutable() ) ).times(3); EasyMock.expect(runner.getConfig()).andReturn(new RemoteTaskRunnerConfig()).times(2); EasyMock.expect(runner.getLazyWorkers()).andReturn(new ArrayList<>()); EasyMock.expect(runner.markWorkersLazy(EasyMock.anyObject(), EasyMock.anyInt())) .andReturn(Collections.emptyList()); EasyMock.replay(runner); Provisioner provisioner = strategy.makeProvisioner(runner); boolean terminatedSomething = provisioner.doTerminate(); Assert.assertFalse(terminatedSomething); EasyMock.verify(autoScaler); // Don't provision anything EasyMock.reset(autoScaler); 
EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(1).times(2); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(2); EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) .andReturn(Collections.singletonList("ip")); EasyMock.replay(autoScaler); boolean provisionedSomething = provisioner.doProvision(); Assert.assertFalse(provisionedSomething); EasyMock.verify(autoScaler); EasyMock.reset(autoScaler); // Increase minNumWorkers EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(3).times(2); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(5); EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) .andReturn(Collections.singletonList("ip")); EasyMock.expect(autoScaler.provision()).andReturn( new AutoScalingData(Collections.singletonList("h3")) ); // Should provision two new workers EasyMock.expect(autoScaler.provision()).andReturn( new AutoScalingData(Collections.singletonList("h4")) ); EasyMock.replay(autoScaler); provisionedSomething = provisioner.doProvision(); Assert.assertTrue(provisionedSomething); EasyMock.verify(autoScaler); EasyMock.verify(runner); } @Test public void testNullWorkerConfig() { workerConfig.set(null); EasyMock.replay(autoScaler); RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); EasyMock.expect(runner.getPendingTaskPayloads()).andReturn( Collections.singletonList( NoopTask.create() ) ).times(1); EasyMock.expect(runner.getWorkers()).andReturn( Collections.singletonList( new TestZkWorker(null).toImmutable() ) ).times(2); EasyMock.replay(runner); Provisioner provisioner = strategy.makeProvisioner(runner); boolean terminatedSomething = provisioner.doTerminate(); boolean provisionedSomething = provisioner.doProvision(); Assert.assertFalse(terminatedSomething); Assert.assertFalse(provisionedSomething); EasyMock.verify(autoScaler); EasyMock.verify(runner); } private static class TestZkWorker extends ZkWorker { private final Task testTask; public TestZkWorker( Task testTask ) { 
this(testTask, "http", "host", "ip", MIN_VERSION); } public TestZkWorker( Task testTask, String scheme, String host, String ip, String version ) { this(testTask, scheme, host, ip, version, 1); } public TestZkWorker( Task testTask, String scheme, String host, String ip, String version, int capacity ) { super(new Worker(scheme, host, ip, capacity, version, WorkerConfig.DEFAULT_CATEGORY), null, new DefaultObjectMapper()); this.testTask = testTask; } @Override public Map<String, TaskAnnouncement> getRunningTasks() { if (testTask == null) { return new HashMap<>(); } return ImmutableMap.of( testTask.getId(), TaskAnnouncement.create( testTask, TaskStatus.running(testTask.getId()), TaskLocation.unknown() ) ); } } }
apache/incubator-retired-wave
37,262
wave/src/main/java/org/waveprotocol/wave/client/editor/event/EditorEventHandler.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.waveprotocol.wave.client.editor.event; import com.google.common.annotations.VisibleForTesting; import com.google.gwt.dom.client.Element; import com.google.gwt.dom.client.Node; import com.google.gwt.dom.client.Text; import com.google.gwt.event.dom.client.KeyCodes; import org.waveprotocol.wave.client.common.util.DomHelper; import org.waveprotocol.wave.client.common.util.EventWrapper; import org.waveprotocol.wave.client.common.util.KeyCombo; import org.waveprotocol.wave.client.common.util.QuirksConstants; import org.waveprotocol.wave.client.common.util.SignalEvent; import org.waveprotocol.wave.client.common.util.SignalEvent.KeyModifier; import org.waveprotocol.wave.client.common.util.SignalEvent.KeySignalType; import org.waveprotocol.wave.client.common.util.SignalEvent.MoveUnit; import org.waveprotocol.wave.client.common.util.UserAgent; import org.waveprotocol.wave.client.debug.logger.LogLevel; import org.waveprotocol.wave.client.editor.EditorStaticDeps; import org.waveprotocol.wave.client.editor.constants.BrowserEvents; import org.waveprotocol.wave.client.editor.content.ContentElement; import org.waveprotocol.wave.client.editor.content.ContentNode; import 
org.waveprotocol.wave.client.editor.content.ContentPoint; import org.waveprotocol.wave.client.editor.content.FocusedContentRange; import org.waveprotocol.wave.client.editor.content.NodeEventRouter; import org.waveprotocol.wave.client.editor.event.CompositionEventHandler.CompositionListener; import org.waveprotocol.wave.client.scheduler.Scheduler; import org.waveprotocol.wave.client.scheduler.SchedulerInstance; import org.waveprotocol.wave.client.scheduler.SchedulerTimerService; import org.waveprotocol.wave.client.scheduler.TimerService; import org.waveprotocol.wave.common.logging.LoggerBundle; import org.waveprotocol.wave.model.document.AnnotationBehaviour.CursorDirection; import org.waveprotocol.wave.model.document.util.FocusedPointRange; import org.waveprotocol.wave.model.document.util.Point; /** * Central event handler for the editor, encapsulating the core logic for event * routing and handling. Application specific handling for combos, etc are done via a * subhandler. * * TODO(user): Remove gwt dependencies so that this is junit testable. * * @author danilatos@google.com (Daniel Danilatos) * @author mtsui@google.com (Mark Tsui) */ public final class EditorEventHandler { /** * States the event handler may be in. */ // TODO(danilatos): Consider separating out other states from normal, such as // TYPING, CLIPBOARD, etc, when we are in these transient states. enum State { /** Normal state */ NORMAL, /** IME composition state */ COMPOSITION } /** Reduces the times selection logging is sent to eye3, reporting seems linear with users. 
*/ private static final int SELECTION_LOG_CULL_FACTOR = 100; // 1/100 sent private static final LoggerBundle logger = EditorStaticDeps.logger; /** * Sets whether unsafe key events are cancelled (set to false for testing) */ private static boolean cancelUnsafeKeyEvents = true; private final CompositionListener<EditorEvent> compositionListener = new CompositionListener<EditorEvent>() { @Override public void compositionStart(EditorEvent event) { EditorEventHandler.this.compositionStart(event); } @Override public void compositionUpdate() { EditorEventHandler.this.compositionUpdate(); } @Override public void compositionEnd() { EditorStaticDeps.startIgnoreMutations(); try { EditorEventHandler.this.compositionEnd(); } finally { EditorStaticDeps.endIgnoreMutations(); } } }; private final boolean weirdComposition = QuirksConstants.MODIFIES_DOM_AND_FIRES_TEXTINPUT_AFTER_COMPOSITION; private final boolean useCompositionEvents; /** * Sets whether we use whitelisting or blacklisting to potentially cancel * unhandled keycombos. */ private final boolean useWhiteListing; /** * Current selection. Ensure this is always set correctly, especially * if it's changed or invalidated. */ private FocusedContentRange cachedSelection; /** * Interact with the editor through this interface. */ private final EditorInteractor editorInteractor; private final NodeEventRouter router; /** * We keep track of whether selection affinity is up to date. When we receive * an event, we assume that the event will invalidate the selection affinity, * thus we set selectionAffinityMaybeChanged to true. If we later find out * that the event does not modify selection affinity, we set * selectionAffinityMaybeChanged to false. * * If at the end of the event loop, selectionAffinityMaybeChanged */ private boolean needToSetSelectionAffinity = true; private boolean selectionAffinityMaybeChanged = true; /** Tracks whether there was selection at the start of an event handling run. 
*/ private boolean hadInitialSelection; private State state = State.NORMAL; /** * Handler for higher level, application specific event handling. */ private final EditorEventsSubHandler subHandler; private final CompositionEventHandler<EditorEvent> compositionHandler; /** * @param editorInteractor * @param subHandler */ public EditorEventHandler(EditorInteractor editorInteractor, EditorEventsSubHandler subHandler, NodeEventRouter router, boolean useWhiteListFlag, boolean useWebkitCompositionFlag) { this(new SchedulerTimerService(SchedulerInstance.get(), Scheduler.Priority.CRITICAL), editorInteractor, subHandler, router, useWhiteListFlag, // We may want to turn off composition events for webkit if something goes wrong... QuirksConstants.SUPPORTS_COMPOSITION_EVENTS && (UserAgent.isWebkit() ? useWebkitCompositionFlag : true)); } EditorEventHandler(TimerService criticalTimerService, EditorInteractor interactor, EditorEventsSubHandler subHandler, NodeEventRouter router, boolean useWhiteListing, boolean useCompositionEvents) { this.editorInteractor = interactor; this.subHandler = subHandler; this.router = router; this.useWhiteListing = useWhiteListing; this.compositionHandler = new CompositionEventHandler<EditorEvent>( criticalTimerService, compositionListener, logger, weirdComposition); this.useCompositionEvents = useCompositionEvents; } /** Visible for testing */ State getState() { return state; } static int selectionLogCullRotation = 0; /** * @param signal * @return true if its handled */ public boolean handleEvent(EditorEvent signal) { if (editorInteractor.notifyListeners(signal)) { // The listeners themselves can cancel the event if they wish. return false; } // Wraps handleEventInner to update the selectionAffinity variables. 
selectionAffinityMaybeChanged = true; hadInitialSelection = editorInteractor.hasContentSelection(); boolean retVal = true; try { retVal = handleEventInner(signal); } catch (SelectionLostException e) { if (e.hasLostSelection() && (LogLevel.showDebug() || (selectionLogCullRotation++ % SELECTION_LOG_CULL_FACTOR) == 0)) { EditorStaticDeps.logger.error().log(e); } // NOTE(patcoleman): we assume that if there was no selection to start with, that the // html selection is inside a part with no corresponding content node (e.g. inside doodad // or textbox). In this case it's not cancelled, so the browser can deal with it. retVal = e.hasLostSelection(); } if (selectionAffinityMaybeChanged) { needToSetSelectionAffinity = true; } return retVal; } private boolean handleEventInner(EditorEvent event) throws SelectionLostException { // TODO(danilatos): IE IME keycode thingy!! invalidateSelection(); // NOTE(patcoleman): special cases FTW! // 1) click can be while the editor isn't editing, so needs to avoid needing content selection. if (event.isMouseEvent()) { // Flush because the selection location may have changed to somewhere // else in the same text node. We MUST handle mouse down events for // this. 
editorInteractor.forceFlush(); ContentElement node = editorInteractor.findElementWrapper(event.getTarget()); event.setCaret(new ContentPoint(node, null)); if (node != null && event.isClickEvent()) { router.handleClick(node, event); editorInteractor.clearCaretAnnotations(); editorInteractor.rebiasSelection(CursorDirection.NEUTRAL); return !event.shouldAllowBrowserDefault(); } else { return false; } } // 2) Only update selection if we know it's needed: if (checkIfValidSelectionNeeded(event)) { refreshEditorWithCaret(event); if (cachedSelection == null) { // disallow events if we don't know where the selection is - probably something's botched // lars: only in editing mode; otherwise we block, e.g., keyboard manipulation // of radio buttons return editorInteractor.isEditing(); } } if (weirdComposition && state == State.COMPOSITION) { if (!event.isCompositionEvent()) { compositionHandler.handleOtherEvent(); } } // Handle: if (event.isKeyEvent()) { return handleKeyEvent(event); } else if (event.isCompositionEvent()) { if (useCompositionEvents) { return handleCompositionEvent(event); } else { return false; } } else if (event.isClipboardEvent()) { if (event.isPasteEvent()) { return subHandler.handlePaste(event); } else if (event.isCutEvent()) { return subHandler.handleCut(event); } else if (event.isCopyEvent()) { return subHandler.handleCopy(event); } else { // These are onbeforecopy/onbeforepaste etc.. We are not currently // interested, and they are harmless so just allow. 
return false; } } else if (event.isMutationEvent()) { selectionAffinityMaybeChanged = false; if (!editorInteractor.isExpectingMutationEvents()) { // Don't trust in DomHelper.isTextNode(event.getTarget()) // to detect DOM mutations in text because target property // is inconsistent among browsers {@DOMImplWebkit#eventGetTarget} if (event.isDOMCharacterEvent()) { cachedSelection = editorInteractor.getSelectionPoints(); if (cachedSelection != null) { if (!cachedSelection.isCollapsed()) { logger.trace().logPlainText("WARNING: Probable IME input on non-collapsed " + "range not handled!!!"); // TODO(dan/patcoleman): Yeargh, IME killing a range!!! Nooo!!!! // Handle eeet } logger.trace().logPlainText("Notifying typing extractor for " + "probable IME-caused mutation event"); // Nothing to do with the return value of this method, as mutation // events are not cancellable. editorInteractor.notifyTypingExtractor(cachedSelection.getFocus(), false, false); } } } if (QuirksConstants.LIES_ABOUT_CARET_AT_LINK_END_BOUNDARY) { checkForWebkitEndOfLinkHack(event); } subHandler.handleDomMutation(event); return false; } else if (event.isFocusEvent()) { return false; } else { // cancel anything we don't know about logger.trace().log("Cancelling: " + event.getType()); return true; } } void checkForWebkitEndOfLinkHack(SignalEvent signal) { // If it's inserting text if (DomHelper.isTextNode(signal.getTarget()) && (signal.getType().equals(BrowserEvents.DOMCharacterDataModified) || signal.getType().equals(BrowserEvents.DOMNodeInserted))) { Text textNode = signal.getTarget().cast(); if (textNode.getLength() > 0) { Node e = textNode.getPreviousSibling(); if (e != null && !DomHelper.isTextNode(e) && e.<Element>cast().getTagName().toLowerCase().equals("a")) { FocusedPointRange<Node> selection = editorInteractor.getHtmlSelection(); if (selection.isCollapsed() && selection.getFocus().getTextOffset() == 0) { editorInteractor.noteWebkitEndOfLinkHackOccurred(textNode); } } } } } private boolean 
handleKeyEvent(EditorEvent event) throws SelectionLostException { KeySignalType keySignalType = event.getKeySignalType(); switch (state) { case NORMAL: if (isAccelerator(event)) { refreshEditorWithCaret(event); if (subHandler.handleCommand(event) || subHandler.handleBlockLevelCommands(event, cachedSelection.asOrderedRange(editorInteractor.selectionIsOrdered()))) { return true; } if (cachedSelection.isCollapsed()) { if (subHandler.handleCollapsedKeyCombo(event, cachedSelection.getFocus())) { return true; } } else { if (subHandler.handleRangeKeyCombo(event, cachedSelection.asOrderedRange(editorInteractor.selectionIsOrdered()))) { return true; } } return shouldCancelAcceleratorBrowserDefault(event); } switch(keySignalType) { case INPUT: case DELETE: return handleInputOrDeleteKeyEvent(event, keySignalType); case NAVIGATION: return handleNavigationKeyEvents(event); case NOEFFECT: return false; } throw new RuntimeException("Unhandled signal type"); case COMPOSITION: // NOTE(danilatos): From my investigations, during IME composition, the browser itself // pretty much disables all the combos. Or, it has its own strange buggy behaviour // without us doing anything. Therefore, we can pretty much ignore key events during // composition mode. return false; default: throw new RuntimeException("Unhandled state"); } } private boolean handleCompositionEvent(EditorEvent event) { return compositionHandler.handleCompositionEvent(event, event.getType()); } private void compositionStart(EditorEvent event) { if (state == State.COMPOSITION) { logger.error().log("State was already IME during a compositionstart event!"); } Point<ContentNode> caret; if (cachedSelection == null) { logger.error().log("No selection during a composition start event? 
Maybe it's " + "deep inside some doodad's html?"); caret = null; } else if (cachedSelection.isCollapsed()) { caret = cachedSelection.getFocus(); } else { caret = deleteCachedSelectionRangeAndInvalidate(true); } state = State.COMPOSITION; editorInteractor.compositionStart(caret); } private void compositionUpdate() { editorInteractor.compositionUpdate(); } private void compositionEnd() { // We update the cached selection because sometimes we'll immediately get called back // into compositionStart() cachedSelection = editorInteractor.compositionEnd(); state = State.NORMAL; } private boolean handleInputOrDeleteKeyEvent(EditorEvent event, KeySignalType keySignalType) throws SelectionLostException { // !!!!!!!!! // TODO(danilatos): This caret is in the wrong (full) view, and can die when // applied to mutable doc!!!! Only OK right now out of sheer luck. // !!!!!!!!! Point<ContentNode> caret; boolean isCollapsed = editorInteractor.getHtmlSelection() != null && editorInteractor.getHtmlSelection().isCollapsed(); boolean isReplace = false; if (isCollapsed) { MoveUnit moveUnit = event.getMoveUnit(); if (moveUnit != MoveUnit.CHARACTER) { if (event.getMoveUnit() == MoveUnit.WORD) { if (event.getKeyCode() == KeyCodes.KEY_BACKSPACE) { refreshEditorWithCaret(event); caret = cachedSelection.getFocus(); editorInteractor.deleteWordEndingAt(caret); } else if (event.getKeyCode() == KeyCodes.KEY_DELETE){ refreshEditorWithCaret(event); caret = cachedSelection.getFocus(); editorInteractor.deleteWordStartingAt(caret); } } // TODO(user): Manually handle line/other etc. deletes, because // they might contain formatting, etc. For now, cancelling for safety. return true; } else { // HACK(danilatos/patcoleman): We don't want the caret to get set here, // because it is not safe unless we continually flush the typing extractor // which is undesirable. 
// NOTE #XYZ (this comment referenced from elsewhere) // To fix this properly, we need to restructure the control flow, and // possibly change the types of caret we pass around. caret = null; } } else { refreshEditorWithCaret(event); // NOTE: at this point, should be either INPUT or DELETE boolean isDelete = (keySignalType == KeySignalType.DELETE); if (event.isImeKeyEvent()) { // Semi-HACK(danilatos): sometimes during composition, the selection will be reported // as a range. We want to leave this alone, not delete it. Since we're not handling // ranged deletions with non-FF ime input properly anyway, this will do. caret = cachedSelection.getFocus(); } else { caret = deleteCachedSelectionRangeAndInvalidate(!isDelete); // keep annotations on insert } if (isDelete) { return true; // Did a range delete already. Do not go on to typing extractor. } else { isReplace = true; } } if (keySignalType == KeySignalType.DELETE) { refreshEditorWithCaret(event); caret = cachedSelection.getFocus(); ContentNode node = caret.getContainer(); editorInteractor.checkpoint(new FocusedContentRange(caret)); switch (EventWrapper.getKeyCombo(event)) { case BACKSPACE: case SHIFT_BACKSPACE: editorInteractor.rebiasSelection(CursorDirection.FROM_RIGHT); return router.handleBackspace(node, event); case SHIFT_DELETE: if (!QuirksConstants.HAS_OLD_SCHOOL_CLIPBOARD_SHORTCUTS) { // On a mac, shift+delete is the same as regular delete. 
editorInteractor.rebiasSelection(CursorDirection.FROM_LEFT); return router.handleDelete(node, event); } else { // On windows & linux, shift+delete is cut // It should have been caught earlier by the isAccelerator check throw new RuntimeException("Shift delete should have been caught" + "as an accelerator event!"); } case DELETE: editorInteractor.rebiasSelection(CursorDirection.FROM_LEFT); return router.handleDelete(node, event); } } else if (handleEventsManuallyOnNode(event, caret)){ return true; } return handleNormalTyping(event, caret, isReplace); } private Point<ContentNode> deleteCachedSelectionRangeAndInvalidate(boolean isReplace) { // !!!!!!!!! // TODO(danilatos): This caret is in the wrong (full) view, and can die when // applied to mutable doc!!!! Only OK right now out of sheer luck. // !!!!!!!!! editorInteractor.checkpoint(cachedSelection); Point<ContentNode> start; Point<ContentNode> end; if (editorInteractor.selectionIsOrdered()) { start = cachedSelection.getAnchor(); end = cachedSelection.getFocus(); } else { end = cachedSelection.getAnchor(); start = cachedSelection.getFocus(); } Point<ContentNode> caret = null; caret = editorInteractor.deleteRange(start, end, isReplace); setCaret(caret); assert cachedSelection == null; return caret; } private boolean handleNormalTyping(EditorEvent event, Point<ContentNode> caret, boolean isReplace) throws SelectionLostException { // Note that caret may be null if this is called during typing extraction // Normal typing selectionAffinityMaybeChanged = false; // NOTE(danilatos): We can't tell if a key event is IME in firefox, so // we just always do typing extraction instead. // Additionally, even for normal key strokes, firefox has strange // behaviour when handling them programmatically. The cursor appears // to lag a character behind, and there are selection half-disappearing // issues when deleting around annotation boundaries. 
boolean useTypingExtractor = event.isImeKeyEvent() || UserAgent.isFirefox(); if (useTypingExtractor) { // Just normal typing. Send to typing extractor. if (editorInteractor.isTyping()) { // NOTE(patcoleman): Do not change affinity while normal typing, our affinity should // remain consistent across normal typing. logger.trace().log("Not notifying typing extractor, already notified"); } else { if (UserAgent.isFirefox()) { // NOTE(user): This is one way of handling the affinity problem. // The other method is to detect where the selection is, and modify // the behaviour of typing extractor/document such that when the // typing is extracted, the formatting applied to the content doc // matches the html impl. // TODO(user): This doesn't handle the case for persistent inline // elements where the browser may automatically place the cursor. We // don't currently have such elements, but we'll need to consider // this case in the future. refreshEditorWithCaret(event); caret = maybeSetSelectionLeftAffinity(event.getCaret().asPoint()); event.setCaret(ContentPoint.fromPoint(caret)); } else { // Caret might be null } logger.trace().log("Notifying typing extractor"); return editorInteractor.notifyTypingExtractor(caret, caret == null, isReplace); } return false; } else { char c = (char) event.getKeyCode(); refreshEditorWithCaret(event); caret = cachedSelection.getFocus(); // Is it safe to delete this line? caret = editorInteractor.insertText(caret, String.valueOf(c), isReplace); caret = editorInteractor.normalizePoint(caret); setCaret(caret); editorInteractor.rebiasSelection(CursorDirection.FROM_LEFT); return true; } } private boolean handleEventsManuallyOnNode(EditorEvent event, Point<ContentNode> caret) throws SelectionLostException { // Note that caret may be null if this is called during typing extraction // Always handle enter specially, and always cancel the default action. // TODO(danilatos): This is still a slight anomaly, to call a // node.handleXYZ method here. 
if (event.isOnly(KeyCodes.KEY_ENTER)) { refreshEditorWithCaret(event); caret = event.getCaret().asPoint(); editorInteractor.checkpoint(new FocusedContentRange(caret)); router.handleEnter(caret.getContainer(), event); editorInteractor.rebiasSelection(CursorDirection.FROM_LEFT); return true; } else if (event.isCombo(KeyCodes.KEY_ENTER, KeyModifier.SHIFT)) { // shift+enter inserts a "newline" (such as a <br/>) by default // TODO(danilatos): Form elements want to handle this. return true; } return false; } private boolean handleNavigationKeyEvents(EditorEvent event) { editorInteractor.checkpoint(null); editorInteractor.clearCaretAnnotations(); ContentNode node = cachedSelection.getFocus().getContainer(); logger.trace().log("Navigation event"); // Not using key combo, because we want to handle left key with // any modifiers also applying. // TODO(danilatos): MoveUnit, and holding down shift for selection. if (event.getKeyCode() == KeyCodes.KEY_LEFT) { router.handleLeft(node, event); editorInteractor.rebiasSelection(CursorDirection.FROM_RIGHT); return !event.shouldAllowBrowserDefault(); } else if (event.getKeyCode() == KeyCodes.KEY_RIGHT) { router.handleRight(node, event); editorInteractor.rebiasSelection(CursorDirection.FROM_LEFT); return !event.shouldAllowBrowserDefault(); } else { editorInteractor.rebiasSelection(CursorDirection.NEUTRAL); } return false; } private Point<ContentNode> maybeSetSelectionLeftAffinity(Point<ContentNode> caret) { if (!needToSetSelectionAffinity) { return caret; } needToSetSelectionAffinity = false; Point<ContentNode> newCaret = editorInteractor.normalizePoint(caret); if (newCaret != caret) { editorInteractor.setCaret(newCaret); } return newCaret; } /** * Tells us if this key event is an "accelerator" key event. * * For lack of a better word, basically this means keys & combos that aren't * used for basic input, deletion, and navigation. See the implementation * comments for details. * * @param event Must be a key event! 
 * @return true if this event is an accelerator key sequence.
 */
static boolean isAccelerator(SignalEvent event) {
  // Delegate to the testable variant, binding the real platform/quirk flags.
  return isAcceleratorInner(event, UserAgent.isMac(),
      QuirksConstants.HAS_OLD_SCHOOL_CLIPBOARD_SHORTCUTS);
}

/**
 * Parameterised to allow testing different browser/os permutations.
 *
 * @param event key event to classify
 * @param isMac true when running on OSX (alt is then treated as a plain modifier)
 * @param quirksHasOldSchoolClipboardShortcuts true when shift+delete etc. act as clipboard combos
 * @return true if this event is an accelerator key sequence
 */
@VisibleForTesting
static boolean isAcceleratorInner(SignalEvent event,
    boolean isMac, boolean quirksHasOldSchoolClipboardShortcuts) {
  switch (event.getKeySignalType()) {
    case INPUT:
      // Alt on its own is a simple modifier, like shift, on OSX
      boolean maybeAltKey = !isMac && event.getAltKey();
      // NOTE(user): Perhaps we should create a registry in
      // EditorEventSubHandler of non-metesque like command keys such as TAB.
      // For now TAB is our only special case, but we may need to allow
      // implementers to define arbitrary keys as accelerators.
      return event.getCtrlKey() || event.getMetaKey()
          || event.getKeyCode() == KeyCodes.KEY_TAB || maybeAltKey;
    case DELETE:
      if (quirksHasOldSchoolClipboardShortcuts
          && event.getKeyCode() == KeyCodes.KEY_DELETE
          && KeyModifier.SHIFT.check(event)) {
        // shift+delete on windows/linux is cut
        // (shift+insert and ctrl+insert are other clipboard alternatives,
        // but that's handled below).
        return true;
      } else {
        return false;
      }
    case NAVIGATION:
      // All navigation does not count
      return false;
    case NOEFFECT:
      // Random special keys like ESC, F7, TAB, INS, etc count
      return true;
  }
  throw new RuntimeException("Unknown KeySignal type");
}

/**
 * Decides whether the browser's default action for an accelerator must be cancelled.
 *
 * @param acceleratorEvent Must be a key event AND isAccelerator(event) == true
 * @return whether we should cancel the browser's default action
 */
private boolean shouldCancelAcceleratorBrowserDefault(SignalEvent acceleratorEvent) {
  // (more verbose name in argument to remind us of the constraint).
  SignalEvent event = acceleratorEvent;

  // First, handle non-combo events (here they should only be "NOEFFECT" keys)
  // We use blacklisting for these.
  // TODO(danilatos/mtsui): Switch to whitelisting as well?
  if (KeyModifier.NONE.check(event)) {
    if (event.getKeyCode() == EventWrapper.KEY_INSERT) {
      // Cancel INSERT to prevent overwrite mode, for now
      // (Happens in IE).
      return true;
    } else {
      // Other things like ESC, TAB, function keys, etc are OK.
      return cancelUnsafeKeyEvents;
    }
  }

  if (isAllowableCombo(event)) {
    // We can safely ignore
    logger.trace().log("Allowing event");
    return false;
  }

  if (logger.trace().shouldLog()) {
    logger.trace().log("unsafe combo: ", event.getType(), event.getKeyCode());
  }
  return cancelUnsafeKeyEvents;
}

// True when the combo may be passed through to the browser: whitelisted, or
// (when not in strict whitelisting mode) at least not blacklisted.
private boolean isAllowableCombo(SignalEvent sEvent) {
  // Detect inconsistency between whitelist and blacklist.
  checkBlackWhiteListConsistency(sEvent);
  if (isWhiteListedCombo(sEvent)) {
    return true;
  }
  if (useWhiteListing) {
    // If we are using whitelisting, disallow all events that didn't pass the
    // above check.
    return false;
  } else {
    // TODO(user): Log a sample of these combos to the server, so we can
    // analyse these and perhaps add a class of keys to the whitelist. Also
    // store this string somewhere so in case an exception is thrown later, it
    // can be associated with this event.
    if (logger.trace().shouldLog()) {
      logger.trace().log("not in whitelist: ", sEvent);
    }
    // Otherwise return allow events that are not in the blacklist.
    return !isBlackListedCombo(sEvent);
  }
}

// Returns true when the combo is consistently classified; logs (and asserts in
// debug builds) when a combo is in both the whitelist and the blacklist.
private boolean checkBlackWhiteListConsistency(SignalEvent sEvent) {
  boolean isConsistent = !(isWhiteListedCombo(sEvent) && isBlackListedCombo(sEvent));
  if (!isConsistent) {
    String message = "Combo both whitelisted and blacklisted! " + sEvent.getKeyCode();
    assert false : message;
    logger.error().logPlainText(message);
  }
  return isConsistent;
}

/**
 * These key combos can be safely ignored. They don't directly modify the
 * editable region, but may perform something useful on the browser so we
 * don't want to cancel them. i.e. copy/cut/paste key events.
 *
 * Combos listed here should be accompanied with a comment stating the reason.
 *
 * Maintaining this whitelist is quite an effort, but at least we shouldn't
 * get the browser blowing up if the user entered some keycombo we don't know
 * about.
 *
 *
 * References:
 * http://support.mozilla.com/en-US/kb/Keyboard+shortcuts
 * http://docs.info.apple.com/article.html?artnum=42951
 * http://www.microsoft.com/windows/products/winfamily/ie/quickref.mspx
 *
 * @return true if it is safe to ignore, or false which will result in further
 *         handling.
 */
private boolean isWhiteListedCombo(SignalEvent signal) {
  KeyCombo keyCombo = EventWrapper.getKeyCombo(signal);
  switch (keyCombo) {
    // Edit actions:
    // Allow cut/copy/paste combos and handle the actual clipboard events
    // later.
    case ORDER_C: // copy
    case ORDER_X: // cut
    case ORDER_V: // paste
    case ORDER_A: // select all
    case ORDER_P: // print
    case ORDER_L: // navigate to url box

    // Page navigation
    // On safari, delete/backspace is normally used to go back as well, but
    // of course in the editor we won't allow that.
    case META_LEFT: // back
    case META_RIGHT: // forward
    case META_HOME: // home
    case ORDER_O: // open file
    case ORDER_R: // reload
    case ORDER_SHIFT_R: // reload (override cache)

    // Search
    case ORDER_F: // find
    case ORDER_G: // find again

    // tools
    case ORDER_D: // bookmark this page

    // Window and tabs
    case ORDER_N: // new window
    case ORDER_T: // new tab
    case ORDER_W: // close window
    case ORDER_Q: // quit
      return true;
    default:
  }

  if (QuirksConstants.HAS_OLD_SCHOOL_CLIPBOARD_SHORTCUTS) {
    if (isAlternateClipboardCombo(signal)) {
      return true;
    }
  }

  if (UserAgent.isSafari() && UserAgent.isMac()) {
    // Navigation events for Mac Safari only.
    switch (keyCombo) {
      case CTRL_A:
      case CTRL_B:
      case CTRL_E:
      case CTRL_F:
        return true;
      default:
    }
  }

  return false;
}

// Combos that must not reach the browser because they would mutate the
// editable region behind our back (undo, character/line kills, etc).
private boolean isBlackListedCombo(SignalEvent event) {
  KeyCombo keyCombo = EventWrapper.getKeyCombo(event);
  switch (keyCombo) {
    // Disallow undo
    case ORDER_Z:
      return true;
  }
  if (UserAgent.isMac()) {
    switch (keyCombo) {
      case CTRL_D: // Deletes a character, needs to be handled manually
      case CTRL_H: // Deletes a character backwards
      case CTRL_K: // Deletes to end of line, needs to be handled manually
        return true;
    }
    if (UserAgent.isFirefox()) {
      switch (keyCombo) {
        case CTRL_W: // Deletes a word backwards
          return true;
        case CTRL_U: // Kills line
          // NOTE(user): Implement this when Firefox updates their selection API.
          return true;
      }
    }
    if (UserAgent.isWebkit()) {
      switch (keyCombo) {
        case CTRL_O: // Inserts a new line
          return true;
      }
    }
  }
  if (QuirksConstants.PLAINTEXT_PASTE_DOES_NOT_EMIT_PASTE_EVENT
      && keyCombo == KeyCombo.ORDER_ALT_SHIFT_V) {
    return true;
  }
  return false;
}

// Alternative clipboard shortcuts used on windows/linux in addition to the
// ctrl+c/x/v family.
private boolean isAlternateClipboardCombo(SignalEvent signal) {
  switch (EventWrapper.getKeyCombo(signal)) {
    // Edit actions:
    // Allow cut/copy/paste combos and handle the actual clipboard events
    // later.
    case SHIFT_DELETE: // cut (win + linux only)
    case CTRL_INSERT: // copy (win + linux only)
    case SHIFT_INSERT: // paste (win + linux only)
      return true;
    default:
      return false;
  }
}

// If any of these abstract methods return true, we stop processing the signal
// We also prevent default for those named with "handleXYZ" if true is returned

// Moves the caret, dropping the cached selection first so stale state is not reused.
private void setCaret(Point<ContentNode> caret) {
  invalidateSelection();
  editorInteractor.setCaret(caret);
}

// Drops the cached selection; it will be re-fetched on the next refresh.
private void invalidateSelection() {
  cachedSelection = null;
}

/**
 * Flushes the editor, and updates the caret of the event to be the new start of selection.
 */
private void refreshEditorWithCaret(EditorEvent event) throws SelectionLostException {
  // NOTE(patcoleman): don't call interactor's flush outside here - it is possible the rest of the
  // event states will not be updated correctly.
  editorInteractor.forceFlush();
  cachedSelection = editorInteractor.getSelectionPoints();
  if (cachedSelection != null) {
    event.setCaret(ContentPoint.fromPoint(cachedSelection.getFocus()));
  } else {
    // Losing the selection right after a flush probably indicates a bug; surface
    // it as a checked exception so callers can recover or report it.
    throw new SelectionLostException("Null selection after force flushing editor, " +
        "event = " + event.getType(), hadInitialSelection);
  }
}

/**
 * A check extracted out, to see whether a particular event requires a valid refreshed selection.
 */
private boolean checkIfValidSelectionNeeded(EditorEvent event) {
  if (event.isMutationEvent() || event.isFocusEvent()) {
    return false; // mutations or focus don't mutate the document at this stage.
  } else if (event.isKeyEvent() && state == State.NORMAL) {
    if (event.isImeKeyEvent()) {
      return false; // ime typing can be extracted not on firefox
    } else if(event.getKeySignalType() == KeySignalType.INPUT) {
      return false; // normal typing can be extracted on firefox
    }
  }
  return true;
}

/**
 * This may not be always correct, but may be useful when the selection is
 * not otherwise available, i.e. when the editor is blurred.
 */
public FocusedContentRange getCachedSelection() {
  return cachedSelection;
}

/**
 * Sets whether unsafe combos are cancelled.
 */
public static void setCancelUnsafeCombos(boolean shouldCancel) {
  cancelUnsafeKeyEvents = shouldCancel;
}

/**
 * Gets whether unsafe combos are cancelled.
 */
public static boolean getCancelUnsafeCombos() {
  return cancelUnsafeKeyEvents;
}

/**
 * Checked exception for finding any places the editor unexpectedly
 * has no selection - as this probably indicates a bug.
 */
private static class SelectionLostException extends Exception {
  // Whether the selection was actually lost (vs. never present to begin with).
  private final boolean lostSelection;

  public SelectionLostException(String message, boolean lost) {
    super(message + ". Selection was " + (lost ? "" : "not ") + "lost.");
    this.lostSelection = lost;
  }

  public boolean hasLostSelection() {
    return lostSelection;
  }
}
}
apache/seatunnel
37,168
seatunnel-connectors-v2/connector-jdbc/src/main/java/org/apache/seatunnel/connectors/seatunnel/jdbc/internal/dialect/JdbcDialect.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.seatunnel.connectors.seatunnel.jdbc.internal.dialect; import org.apache.seatunnel.shade.org.apache.commons.lang3.StringUtils; import org.apache.seatunnel.api.table.catalog.TablePath; import org.apache.seatunnel.api.table.catalog.TableSchema; import org.apache.seatunnel.api.table.converter.BasicTypeDefine; import org.apache.seatunnel.api.table.converter.TypeConverter; import org.apache.seatunnel.api.table.schema.event.AlterTableAddColumnEvent; import org.apache.seatunnel.api.table.schema.event.AlterTableChangeColumnEvent; import org.apache.seatunnel.api.table.schema.event.AlterTableColumnEvent; import org.apache.seatunnel.api.table.schema.event.AlterTableColumnsEvent; import org.apache.seatunnel.api.table.schema.event.AlterTableDropColumnEvent; import org.apache.seatunnel.api.table.schema.event.AlterTableModifyColumnEvent; import org.apache.seatunnel.api.table.schema.event.SchemaChangeEvent; import org.apache.seatunnel.api.table.type.SqlType; import org.apache.seatunnel.connectors.seatunnel.jdbc.config.JdbcConnectionConfig; import org.apache.seatunnel.connectors.seatunnel.jdbc.internal.connection.JdbcConnectionProvider; import 
org.apache.seatunnel.connectors.seatunnel.jdbc.internal.connection.SimpleJdbcConnectionProvider;
import org.apache.seatunnel.connectors.seatunnel.jdbc.internal.converter.JdbcRowConverter;
import org.apache.seatunnel.connectors.seatunnel.jdbc.internal.dialect.dialectenum.FieldIdeEnum;
import org.apache.seatunnel.connectors.seatunnel.jdbc.source.JdbcSourceTable;
import org.apache.seatunnel.connectors.seatunnel.jdbc.utils.DefaultValueUtils;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.Serializable;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.stream.Collectors;

import static java.lang.String.format;

/**
 * Represents a dialect of SQL implemented by a particular JDBC system. Dialects should be immutable
 * and stateless.
 */
public interface JdbcDialect extends Serializable {

    Logger log = LoggerFactory.getLogger(JdbcDialect.class.getName());

    /**
     * Get the name of jdbc dialect.
     *
     * @return the dialect name.
     */
    String dialectName();

    /**
     * Get converter that convert jdbc object to seatunnel internal object.
     *
     * @return a row converter for the database
     */
    JdbcRowConverter getRowConverter();

    /**
     * Get converter that convert type object to seatunnel internal type.
     *
     * @return a type converter for the database
     */
    default TypeConverter<BasicTypeDefine> getTypeConverter() {
        throw new UnsupportedOperationException("TypeConverter is not supported");
    }

    /**
     * get jdbc meta-information type to seatunnel data type mapper.
     *
     * @return a type mapper for the database
     */
    JdbcDialectTypeMapper getJdbcDialectTypeMapper();

    /**
     * SQL expression that buckets a field into {@code mod} shards by hashing it.
     *
     * @param nativeType native column type of the field (ignored by this default implementation)
     * @param fieldName field to hash
     * @param mod number of buckets
     * @return hash-mod SQL expression
     */
    default String hashModForField(String nativeType, String fieldName, int mod) {
        return hashModForField(fieldName, mod);
    }

    // NOTE(review): this generic form assumes the database can apply % to the MD5
    // result; dialects where MD5 yields a string must override it — verify per dialect.
    default String hashModForField(String fieldName, int mod) {
        return "ABS(MD5(" + quoteIdentifier(fieldName) + ") % " + mod + ")";
    }

    /** Quotes the identifier for table name or field name */
    default String quoteIdentifier(String identifier) {
        return identifier;
    }

    /** Quotes the identifier for database name or field name */
    default String quoteDatabaseIdentifier(String identifier) {
        return identifier;
    }

    /** Builds {@code database.table}, quoting each part with the dialect's rules. */
    default String tableIdentifier(String database, String tableName) {
        return quoteDatabaseIdentifier(database) + "." + quoteIdentifier(tableName);
    }

    /**
     * Constructs the dialects insert statement for a single row. The returned string will be used
     * as a {@link java.sql.PreparedStatement}. Fields in the statement must be in the same order as
     * the {@code fieldNames} parameter.
     *
     * <pre>{@code
     * INSERT INTO table_name (column_name [, ...]) VALUES (value [, ...])
     * }</pre>
     *
     * @return the dialects {@code INSERT INTO} statement.
     */
    default String getInsertIntoStatement(String database, String tableName, String[] fieldNames) {
        String columns =
                Arrays.stream(fieldNames)
                        .map(this::quoteIdentifier)
                        .collect(Collectors.joining(", "));
        // Named placeholders (":field") are substituted by the statement executor later.
        String placeholders =
                Arrays.stream(fieldNames)
                        .map(fieldName -> ":" + fieldName)
                        .collect(Collectors.joining(", "));
        return String.format(
                "INSERT INTO %s (%s) VALUES (%s)",
                tableIdentifier(database, tableName), columns, placeholders);
    }

    /**
     * Constructs the dialects update statement for a single row with the given condition. The
     * returned string will be used as a {@link java.sql.PreparedStatement}. Fields in the statement
     * must be in the same order as the {@code fieldNames} parameter.
     *
     * <pre>{@code
     * UPDATE table_name SET col = val [, ...] WHERE cond [AND ...]
     * }</pre>
     *
     * @return the dialects {@code UPDATE} statement.
*/ default String getUpdateStatement( String database, String tableName, String[] fieldNames, String[] conditionFields, boolean isPrimaryKeyUpdated) { fieldNames = Arrays.stream(fieldNames) .filter( fieldName -> isPrimaryKeyUpdated || !Arrays.asList(conditionFields) .contains(fieldName)) .toArray(String[]::new); String setClause = Arrays.stream(fieldNames) .map(fieldName -> format("%s = :%s", quoteIdentifier(fieldName), fieldName)) .collect(Collectors.joining(", ")); String conditionClause = Arrays.stream(conditionFields) .map(fieldName -> format("%s = :%s", quoteIdentifier(fieldName), fieldName)) .collect(Collectors.joining(" AND ")); return String.format( "UPDATE %s SET %s WHERE %s", tableIdentifier(database, tableName), setClause, conditionClause); } /** * Constructs the dialects delete statement for a single row with the given condition. The * returned string will be used as a {@link java.sql.PreparedStatement}. Fields in the statement * must be in the same order as the {@code fieldNames} parameter. * * <pre>{@code * DELETE FROM table_name WHERE cond [AND ...] * }</pre> * * @return the dialects {@code DELETE} statement. */ default String getDeleteStatement(String database, String tableName, String[] conditionFields) { String conditionClause = Arrays.stream(conditionFields) .map(fieldName -> format("%s = :%s", quoteIdentifier(fieldName), fieldName)) .collect(Collectors.joining(" AND ")); return String.format( "DELETE FROM %s WHERE %s", tableIdentifier(database, tableName), conditionClause); } /** * Generates a query to determine if a row exists in the table. The returned string will be used * as a {@link java.sql.PreparedStatement}. * * <pre>{@code * SELECT 1 FROM table_name WHERE cond [AND ...] * }</pre> * * @return the dialects {@code QUERY} statement. 
 */
    default String getRowExistsStatement(
            String database, String tableName, String[] conditionFields) {
        // Each condition column becomes an equality predicate with a named placeholder.
        String fieldExpressions =
                Arrays.stream(conditionFields)
                        .map(field -> format("%s = :%s", quoteIdentifier(field), field))
                        .collect(Collectors.joining(" AND "));
        return String.format(
                "SELECT 1 FROM %s WHERE %s",
                tableIdentifier(database, tableName), fieldExpressions);
    }

    /**
     * Constructs the dialects upsert statement if supported; such as MySQL's {@code DUPLICATE KEY
     * UPDATE}, or PostgreSQL's {@code ON CONFLICT... DO UPDATE SET..}.
     *
     * <p>If supported, the returned string will be used as a {@link java.sql.PreparedStatement}.
     * Fields in the statement must be in the same order as the {@code fieldNames} parameter.
     *
     * <p>If the dialect does not support native upsert statements, the writer will fallback to
     * {@code SELECT ROW Exists} + {@code UPDATE}/{@code INSERT} which may have poor performance.
     *
     * @return the dialects {@code UPSERT} statement or {@link Optional#empty()}.
     */
    Optional<String> getUpsertStatement(
            String database, String tableName, String[] fieldNames, String[] uniqueKeyFields);

    /**
     * Constructs the dialects upsert statement if supported; such as MySQL's {@code DUPLICATE KEY
     * UPDATE}, or PostgreSQL's {@code ON CONFLICT... DO UPDATE SET..}.
     *
     * <p>If supported, the returned string will be used as a {@link java.sql.PreparedStatement}.
     * Fields in the statement must be in the same order as the {@code columns in tableSchema}
     * parameter.
     *
     * <p>If the dialect does not support native upsert statements, the writer will fallback to
     * {@code SELECT ROW Exists} + {@code UPDATE}/{@code INSERT} which may have poor performance.
     *
     * @return the dialects {@code UPSERT} statement or {@link Optional#empty()}.
 */
    default Optional<String> getUpsertStatementByTableSchema(
            String database, String tableName, TableSchema tableSchema, String[] uniqueKeyFields) {
        // Delegates to the field-name based variant; field order follows the schema's columns.
        return getUpsertStatement(
                database, tableName, tableSchema.getFieldNames(), uniqueKeyFields);
    }

    /**
     * Different dialects optimize their PreparedStatement
     *
     * <p>NOTE(review): the method name ("creat...") is a long-standing typo kept for
     * backwards compatibility with existing dialect implementations.
     *
     * @return The logic about optimize PreparedStatement
     */
    default PreparedStatement creatPreparedStatement(
            Connection connection, String queryTemplate, int fetchSize) throws SQLException {
        PreparedStatement statement =
                connection.prepareStatement(
                        queryTemplate, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
        // Integer.MIN_VALUE is commonly used (e.g. by MySQL) to request row streaming —
        // TODO confirm per dialect.
        if (fetchSize == Integer.MIN_VALUE || fetchSize > 0) {
            statement.setFetchSize(fetchSize);
        }
        return statement;
    }

    // NOTE(review): the PreparedStatement is never closed here, so the returned
    // metadata must be consumed before the connection is closed; consider letting
    // callers manage the statement lifecycle.
    default ResultSetMetaData getResultSetMetaData(Connection conn, String query)
            throws SQLException {
        PreparedStatement ps = conn.prepareStatement(query);
        return ps.getMetaData();
    }

    /** Table-name portion (schema-qualified) used when looking up table metadata. */
    default String extractTableName(TablePath tablePath) {
        return tablePath.getSchemaAndTableName();
    }

    /**
     * Normalizes an identifier's case according to the configured field-identifier mode.
     *
     * <p>NOTE(review): {@code FieldIdeEnum.valueOf} throws IllegalArgumentException for
     * unrecognized modes — callers are expected to pass validated config values.
     */
    default String getFieldIde(String identifier, String fieldIde) {
        if (StringUtils.isEmpty(fieldIde)) {
            return identifier;
        }
        switch (FieldIdeEnum.valueOf(fieldIde.toUpperCase())) {
            case LOWERCASE:
                return identifier.toLowerCase();
            case UPPERCASE:
                return identifier.toUpperCase();
            default:
                return identifier;
        }
    }

    /** Connection-URL parameters this dialect injects when the user did not set them. */
    default Map<String, String> defaultParameter() {
        return new HashMap<>();
    }

    /** Adds each default parameter to {@code info} unless the URL or info already define it. */
    default void connectionUrlParse(
            String url, Map<String, String> info, Map<String, String> defaultParameter) {
        defaultParameter.forEach(
                (key, value) -> {
                    if (!url.contains(key) && !info.containsKey(key)) {
                        info.put(key, value);
                    }
                });
    }

    /** Parses a table-path string into a {@link TablePath}. */
    default TablePath parse(String tablePath) {
        return TablePath.of(tablePath);
    }

    /** Full identifier for the given table path, as used in generated SQL. */
    default String tableIdentifier(TablePath tablePath) {
        return tablePath.getFullName();
    }

    /**
     * Approximate total number of entries in the lookup table.
     *
     * @param connection The JDBC connection object used to connect to the database.
     * @param table table info.
* @return approximate row count statement. */ default Long approximateRowCntStatement(Connection connection, JdbcSourceTable table) throws SQLException { if (StringUtils.isNotBlank(table.getQuery())) { return SQLUtils.countForSubquery(connection, table.getQuery()); } return SQLUtils.countForTable(connection, tableIdentifier(table.getTablePath())); } /** * Performs a sampling operation on the specified column of a table in a JDBC-connected * database. * * @param connection The JDBC connection object used to connect to the database. * @param table The table in which the column resides. * @param columnName The name of the column to be sampled. * @param samplingRate samplingRate The inverse of the fraction of the data to be sampled from * the column. For example, a value of 1000 would mean 1/1000 of the data will be sampled. * @return Returns a List of sampled data from the specified column. * @throws SQLException If an SQL error occurs during the sampling operation. */ default Object[] sampleDataFromColumn( Connection connection, JdbcSourceTable table, String columnName, int samplingRate, int fetchSize) throws Exception { String sampleQuery; if (StringUtils.isNotBlank(table.getQuery())) { sampleQuery = String.format( "SELECT %s FROM (%s) AS T", quoteIdentifier(columnName), table.getQuery()); } else { sampleQuery = String.format( "SELECT %s FROM %s", quoteIdentifier(columnName), tableIdentifier(table.getTablePath())); } try (PreparedStatement stmt = creatPreparedStatement(connection, sampleQuery, fetchSize)) { log.info(String.format("Split Chunk, approximateRowCntStatement: %s", sampleQuery)); try (ResultSet rs = stmt.executeQuery()) { int count = 0; List<Object> results = new ArrayList<>(); while (rs.next()) { count++; if (count % samplingRate == 0) { results.add(rs.getObject(1)); } if (Thread.currentThread().isInterrupted()) { throw new InterruptedException("Thread interrupted"); } } Object[] resultsArray = results.toArray(); Arrays.sort(resultsArray); return 
resultsArray; } } } /** * Query the maximum value of the next chunk, and the next chunk must be greater than or equal * to <code>includedLowerBound</code> value [min_1, max_1), [min_2, max_2),... [min_n, null). * Each time this method is called it will return max1, max2... * * @param connection JDBC connection. * @param table table info. * @param columnName column name. * @param chunkSize chunk size. * @param includedLowerBound the previous chunk end value. * @return next chunk end value. */ default Object queryNextChunkMax( Connection connection, JdbcSourceTable table, String columnName, int chunkSize, Object includedLowerBound) throws SQLException { String quotedColumn = quoteIdentifier(columnName); String sqlQuery; if (StringUtils.isNotBlank(table.getQuery())) { sqlQuery = String.format( "SELECT MAX(%s) FROM (" + "SELECT %s FROM (%s) AS T1 WHERE %s >= ? ORDER BY %s ASC LIMIT %s" + ") AS T2", quotedColumn, quotedColumn, table.getQuery(), quotedColumn, quotedColumn, chunkSize); } else { sqlQuery = String.format( "SELECT MAX(%s) FROM (" + "SELECT %s FROM %s WHERE %s >= ? ORDER BY %s ASC LIMIT %s" + ") AS T", quotedColumn, quotedColumn, tableIdentifier(table.getTablePath()), quotedColumn, quotedColumn, chunkSize); } try (PreparedStatement ps = connection.prepareStatement(sqlQuery)) { ps.setObject(1, includedLowerBound); try (ResultSet rs = ps.executeQuery()) { if (rs.next()) { return rs.getObject(1); } else { // this should never happen throw new SQLException( String.format("No result returned after running query [%s]", sqlQuery)); } } } } default JdbcConnectionProvider getJdbcConnectionProvider( JdbcConnectionConfig jdbcConnectionConfig) { return new SimpleJdbcConnectionProvider(jdbcConnectionConfig); } /** * Cast column type e.g. CAST(column AS type) * * @param columnName * @param columnType * @return the text of converted column type. 
*/ default String convertType(String columnName, String columnType) { return columnName; } /** * Refresh physical table schema by schema change event * * @param connection jdbc connection * @param tablePath sink table path * @param event schema change event */ default void applySchemaChange( Connection connection, TablePath tablePath, SchemaChangeEvent event) throws SQLException { if (event instanceof AlterTableColumnsEvent) { for (AlterTableColumnEvent columnEvent : ((AlterTableColumnsEvent) event).getEvents()) { applySchemaChange(connection, tablePath, columnEvent); } } else { if (event instanceof AlterTableChangeColumnEvent) { AlterTableChangeColumnEvent changeColumnEvent = (AlterTableChangeColumnEvent) event; if (!changeColumnEvent .getOldColumn() .equals(changeColumnEvent.getColumn().getName())) { if (!columnExists(connection, tablePath, changeColumnEvent.getOldColumn()) && columnExists( connection, tablePath, changeColumnEvent.getColumn().getName())) { log.warn( "Column {} already exists in table {}. Skipping change column operation. event: {}", changeColumnEvent.getColumn().getName(), tablePath.getFullName(), event); return; } } applySchemaChange(connection, tablePath, changeColumnEvent); } else if (event instanceof AlterTableModifyColumnEvent) { applySchemaChange(connection, tablePath, (AlterTableModifyColumnEvent) event); } else if (event instanceof AlterTableAddColumnEvent) { AlterTableAddColumnEvent addColumnEvent = (AlterTableAddColumnEvent) event; if (columnExists(connection, tablePath, addColumnEvent.getColumn().getName())) { log.warn( "Column {} already exists in table {}. Skipping add column operation. 
event: {}", addColumnEvent.getColumn().getName(), tablePath.getFullName(), event); return; } applySchemaChange(connection, tablePath, addColumnEvent); } else if (event instanceof AlterTableDropColumnEvent) { AlterTableDropColumnEvent dropColumnEvent = (AlterTableDropColumnEvent) event; if (!columnExists(connection, tablePath, dropColumnEvent.getColumn())) { log.warn( "Column {} does not exist in table {}. Skipping drop column operation. event: {}", dropColumnEvent.getColumn(), tablePath.getFullName(), event); return; } applySchemaChange(connection, tablePath, dropColumnEvent); } else { throw new UnsupportedOperationException("Unsupported schemaChangeEvent: " + event); } } } /** * Check if the column exists in the table * * @param connection * @param tablePath * @param column * @return */ default boolean columnExists(Connection connection, TablePath tablePath, String column) { String selectColumnSQL = String.format( "SELECT %s FROM %s WHERE 1 != 1", quoteIdentifier(column), tableIdentifier(tablePath)); try (Statement statement = connection.createStatement()) { return statement.execute(selectColumnSQL); } catch (SQLException e) { log.debug("Column {} does not exist in table {}", column, tablePath.getFullName(), e); return false; } } default void applySchemaChange( Connection connection, TablePath tablePath, AlterTableAddColumnEvent event) throws SQLException { String sourceDialectName = event.getSourceDialectName(); boolean sameCatalog = StringUtils.equals(dialectName(), sourceDialectName); BasicTypeDefine typeDefine = getTypeConverter().reconvert(event.getColumn()); String columnType = sameCatalog ? 
event.getColumn().getSourceType() : typeDefine.getColumnType(); StringBuilder sqlBuilder = new StringBuilder() .append("ALTER TABLE") .append(" ") .append(tableIdentifier(tablePath)) .append(" ") .append("ADD COLUMN") .append(" ") .append(quoteIdentifier(event.getColumn().getName())) .append(" ") .append(columnType); // Only decorate with default value when source dialect is same as sink dialect // Todo Support for cross-database default values for ddl statements if (event.getColumn().getDefaultValue() == null) { sqlBuilder.append(" ").append(event.getColumn().isNullable() ? "NULL" : "NOT NULL"); } else { if (event.getColumn().isNullable()) { sqlBuilder.append(" NULL"); } else if (sameCatalog) { sqlBuilder.append(" ").append(event.getColumn().isNullable() ? "NULL" : "NOT NULL"); } else if (SqlType.TIMESTAMP.equals(event.getColumn().getDataType().getSqlType())) { log.warn( "Default value is not supported for column {} in table {}. Skipping add column operation. event: {}", event.getColumn().getName(), tablePath.getFullName(), event); } else { sqlBuilder.append(" NOT NULL"); } if (sameCatalog) { sqlBuilder .append(" ") .append(sqlClauseWithDefaultValue(typeDefine, sourceDialectName)); } } if (event.getColumn().getComment() != null) { sqlBuilder .append(" ") .append("COMMENT ") .append("'") .append(event.getColumn().getComment()) .append("'"); } if (event.getAfterColumn() != null) { sqlBuilder.append(" ").append("AFTER ").append(quoteIdentifier(event.getAfterColumn())); } String addColumnSQL = sqlBuilder.toString(); try (Statement statement = connection.createStatement()) { log.info("Executing add column SQL: {}", addColumnSQL); statement.execute(addColumnSQL); } } default void applySchemaChange( Connection connection, TablePath tablePath, AlterTableChangeColumnEvent event) throws SQLException { if (event.getColumn().getDataType() == null) { StringBuilder sqlBuilder = new StringBuilder() .append("ALTER TABLE") .append(" ") .append(tableIdentifier(tablePath)) .append(" 
") .append("RENAME COLUMN") .append(" ") .append(quoteIdentifier(event.getOldColumn())) .append(" TO ") .append(quoteIdentifier(event.getColumn().getName())); try (Statement statement = connection.createStatement()) { log.info("Executing rename column SQL: {}", sqlBuilder); statement.execute(sqlBuilder.toString()); } return; } String sourceDialectName = event.getSourceDialectName(); boolean sameCatalog = StringUtils.equals(dialectName(), sourceDialectName); BasicTypeDefine typeDefine = getTypeConverter().reconvert(event.getColumn()); String columnType = sameCatalog ? event.getColumn().getSourceType() : typeDefine.getColumnType(); StringBuilder sqlBuilder = new StringBuilder() .append("ALTER TABLE") .append(" ") .append(tableIdentifier(tablePath)) .append(" ") .append("CHANGE COLUMN") .append(" ") .append(quoteIdentifier(event.getOldColumn())) .append(" ") .append(quoteIdentifier(event.getColumn().getName())) .append(" ") .append(columnType); // Only decorate with default value when source dialect is same as sink dialect // Todo Support for cross-database default values for ddl statements if (event.getColumn().getDefaultValue() == null) { sqlBuilder.append(" ").append(event.getColumn().isNullable() ? "NULL" : "NOT NULL"); } else { if (event.getColumn().isNullable()) { sqlBuilder.append(" NULL"); } else if (sameCatalog) { sqlBuilder.append(" ").append(event.getColumn().isNullable() ? "NULL" : "NOT NULL"); } else if (SqlType.TIMESTAMP.equals(event.getColumn().getDataType().getSqlType())) { log.warn( "Default value is not supported for column {} in table {}. Skipping add column operation. 
event: {}", event.getColumn().getName(), tablePath.getFullName(), event); } else { sqlBuilder.append(" NOT NULL"); } if (sameCatalog) { sqlBuilder .append(" ") .append(sqlClauseWithDefaultValue(typeDefine, sourceDialectName)); } } if (event.getColumn().getComment() != null) { sqlBuilder .append(" ") .append("COMMENT ") .append("'") .append(event.getColumn().getComment()) .append("'"); } if (event.getAfterColumn() != null) { sqlBuilder.append(" ").append("AFTER ").append(quoteIdentifier(event.getAfterColumn())); } String changeColumnSQL = sqlBuilder.toString(); try (Statement statement = connection.createStatement()) { log.info("Executing change column SQL: {}", changeColumnSQL); statement.execute(changeColumnSQL); } } default void applySchemaChange( Connection connection, TablePath tablePath, AlterTableModifyColumnEvent event) throws SQLException { String sourceDialectName = event.getSourceDialectName(); boolean sameCatalog = StringUtils.equals(dialectName(), sourceDialectName); BasicTypeDefine typeDefine = getTypeConverter().reconvert(event.getColumn()); String columnType = sameCatalog ? event.getColumn().getSourceType() : typeDefine.getColumnType(); StringBuilder sqlBuilder = new StringBuilder() .append("ALTER TABLE") .append(" ") .append(tableIdentifier(tablePath)) .append(" ") .append("MODIFY COLUMN") .append(" ") .append(quoteIdentifier(event.getColumn().getName())) .append(" ") .append(columnType); // Only decorate with default value when source dialect is same as sink dialect // Todo Support for cross-database default values for ddl statements if (event.getColumn().getDefaultValue() == null) { sqlBuilder.append(" ").append(event.getColumn().isNullable() ? "NULL" : "NOT NULL"); } else { if (event.getColumn().isNullable()) { sqlBuilder.append(" NULL"); } else if (sameCatalog) { sqlBuilder.append(" ").append(event.getColumn().isNullable() ? 
"NULL" : "NOT NULL"); } else if (SqlType.TIMESTAMP.equals(event.getColumn().getDataType().getSqlType())) { log.warn( "Default value is not supported for column {} in table {}. Skipping add column operation. event: {}", event.getColumn().getName(), tablePath.getFullName(), event); } else { sqlBuilder.append(" NOT NULL"); } if (sameCatalog) { sqlBuilder .append(" ") .append(sqlClauseWithDefaultValue(typeDefine, sourceDialectName)); } } if (event.getColumn().getComment() != null) { sqlBuilder .append(" ") .append("COMMENT ") .append("'") .append(event.getColumn().getComment()) .append("'"); } if (event.getAfterColumn() != null) { sqlBuilder.append(" ").append("AFTER ").append(quoteIdentifier(event.getAfterColumn())); } String modifyColumnSQL = sqlBuilder.toString(); try (Statement statement = connection.createStatement()) { log.info("Executing modify column SQL: {}", modifyColumnSQL); statement.execute(modifyColumnSQL); } } default void applySchemaChange( Connection connection, TablePath tablePath, AlterTableDropColumnEvent event) throws SQLException { String dropColumnSQL = String.format( "ALTER TABLE %s DROP COLUMN %s", tableIdentifier(tablePath), quoteIdentifier(event.getColumn())); try (Statement statement = connection.createStatement()) { log.info("Executing drop column SQL: {}", dropColumnSQL); statement.execute(dropColumnSQL); } } /** * Get the SQL clause for define column default value * * @param columnDefine column define * @param sourceDialectName * @return SQL clause for define default value */ default String sqlClauseWithDefaultValue( BasicTypeDefine columnDefine, String sourceDialectName) { Object defaultValue = columnDefine.getDefaultValue(); if (Objects.nonNull(defaultValue) && needsQuotesWithDefaultValue(columnDefine) && !isSpecialDefaultValue(defaultValue, sourceDialectName)) { defaultValue = quotesDefaultValue(defaultValue); } return "DEFAULT " + defaultValue; } /** * Whether support default value * * @param columnDefine column define * @return 
whether support set default value */ default boolean supportDefaultValue(BasicTypeDefine columnDefine) { return true; } /** * whether quotes with default value * * @param columnDefine column define * @return whether needs quotes with the type */ default boolean needsQuotesWithDefaultValue(BasicTypeDefine columnDefine) { return false; } /** * whether is special default value e.g. current_timestamp * * @param defaultValue default value of column * @param sourceDialectName source dialect name * @return whether is special default value e.g current_timestamp */ default boolean isSpecialDefaultValue(Object defaultValue, String sourceDialectName) { if (DatabaseIdentifier.MYSQL.equals(sourceDialectName)) { return DefaultValueUtils.isMysqlSpecialDefaultValue(defaultValue); } return false; } /** * quotes default value * * @param defaultValue default value of column * @return quoted default value */ default String quotesDefaultValue(Object defaultValue) { return "'" + defaultValue + "'"; } default String getCollationSequence(Connection connection, String collate) { StringBuilder sb = new StringBuilder(); String getDual = dualTable(); String baseQuery = "SELECT char_val FROM ("; StringBuilder unionQuery = new StringBuilder(); for (int i = 32; i <= 126; i++) { if (i > 32) unionQuery.append(" UNION ALL "); unionQuery.append("SELECT ? 
AS char_val ").append(getDual); } String sortedQuery = baseQuery + unionQuery + ") ndi_tmp_chars ORDER BY " + getCollateSql(collate); log.info("sortedCollationQuery is " + sortedQuery); PreparedStatement preparedStatement; try { preparedStatement = connection.prepareStatement(sortedQuery); for (int i = 32; i <= 126; i++) { log.debug("setString " + (i - 32) + " => " + (char) i); preparedStatement.setString(i - 32 + 1, String.valueOf((char) i)); } ResultSet resultSet = preparedStatement.executeQuery(); while (resultSet.next()) { sb.append(resultSet.getString("char_val")); } return sb.toString(); } catch (SQLException e) { throw new RuntimeException(e); } } default String getCollateSql(String collate) { String getCollate = StringUtils.isNotBlank(collate) ? "char_val COLLATE " + collate : "char_val"; return getCollate; } default String dualTable() { return ""; } }
googleapis/google-cloud-java
37,087
java-websecurityscanner/proto-google-cloud-websecurityscanner-v1beta/src/main/java/com/google/cloud/websecurityscanner/v1beta/ListScanRunsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/websecurityscanner/v1beta/web_security_scanner.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.websecurityscanner.v1beta; /** * * * <pre> * Response for the `ListScanRuns` method. * </pre> * * Protobuf type {@code google.cloud.websecurityscanner.v1beta.ListScanRunsResponse} */ public final class ListScanRunsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.websecurityscanner.v1beta.ListScanRunsResponse) ListScanRunsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListScanRunsResponse.newBuilder() to construct. 
private ListScanRunsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListScanRunsResponse() { scanRuns_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListScanRunsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.websecurityscanner.v1beta.WebSecurityScannerProto .internal_static_google_cloud_websecurityscanner_v1beta_ListScanRunsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.websecurityscanner.v1beta.WebSecurityScannerProto .internal_static_google_cloud_websecurityscanner_v1beta_ListScanRunsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse.class, com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse.Builder.class); } public static final int SCAN_RUNS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.websecurityscanner.v1beta.ScanRun> scanRuns_; /** * * * <pre> * The list of ScanRuns returned. * </pre> * * <code>repeated .google.cloud.websecurityscanner.v1beta.ScanRun scan_runs = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.websecurityscanner.v1beta.ScanRun> getScanRunsList() { return scanRuns_; } /** * * * <pre> * The list of ScanRuns returned. * </pre> * * <code>repeated .google.cloud.websecurityscanner.v1beta.ScanRun scan_runs = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.websecurityscanner.v1beta.ScanRunOrBuilder> getScanRunsOrBuilderList() { return scanRuns_; } /** * * * <pre> * The list of ScanRuns returned. 
* </pre> * * <code>repeated .google.cloud.websecurityscanner.v1beta.ScanRun scan_runs = 1;</code> */ @java.lang.Override public int getScanRunsCount() { return scanRuns_.size(); } /** * * * <pre> * The list of ScanRuns returned. * </pre> * * <code>repeated .google.cloud.websecurityscanner.v1beta.ScanRun scan_runs = 1;</code> */ @java.lang.Override public com.google.cloud.websecurityscanner.v1beta.ScanRun getScanRuns(int index) { return scanRuns_.get(index); } /** * * * <pre> * The list of ScanRuns returned. * </pre> * * <code>repeated .google.cloud.websecurityscanner.v1beta.ScanRun scan_runs = 1;</code> */ @java.lang.Override public com.google.cloud.websecurityscanner.v1beta.ScanRunOrBuilder getScanRunsOrBuilder( int index) { return scanRuns_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < scanRuns_.size(); i++) { output.writeMessage(1, scanRuns_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < scanRuns_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, scanRuns_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse)) { return super.equals(obj); } com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse other = (com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse) obj; if (!getScanRunsList().equals(other.getScanRunsList())) return false; if 
(!getNextPageToken().equals(other.getNextPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getScanRunsCount() > 0) { hash = (37 * hash) + SCAN_RUNS_FIELD_NUMBER; hash = (53 * hash) + getScanRunsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static 
Builder newBuilder( com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response for the `ListScanRuns` method. * </pre> * * Protobuf type {@code google.cloud.websecurityscanner.v1beta.ListScanRunsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.websecurityscanner.v1beta.ListScanRunsResponse) com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.websecurityscanner.v1beta.WebSecurityScannerProto .internal_static_google_cloud_websecurityscanner_v1beta_ListScanRunsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.websecurityscanner.v1beta.WebSecurityScannerProto .internal_static_google_cloud_websecurityscanner_v1beta_ListScanRunsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse.class, com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse.Builder.class); } // Construct using com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (scanRunsBuilder_ == null) { scanRuns_ = 
java.util.Collections.emptyList(); } else { scanRuns_ = null; scanRunsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.websecurityscanner.v1beta.WebSecurityScannerProto .internal_static_google_cloud_websecurityscanner_v1beta_ListScanRunsResponse_descriptor; } @java.lang.Override public com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse getDefaultInstanceForType() { return com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse build() { com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse buildPartial() { com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse result = new com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse result) { if (scanRunsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { scanRuns_ = java.util.Collections.unmodifiableList(scanRuns_); bitField0_ = (bitField0_ & ~0x00000001); } result.scanRuns_ = scanRuns_; } else { result.scanRuns_ = scanRunsBuilder_.build(); } } private void buildPartial0( com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override 
public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse) { return mergeFrom((com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse other) { if (other == com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse.getDefaultInstance()) return this; if (scanRunsBuilder_ == null) { if (!other.scanRuns_.isEmpty()) { if (scanRuns_.isEmpty()) { scanRuns_ = other.scanRuns_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureScanRunsIsMutable(); scanRuns_.addAll(other.scanRuns_); } onChanged(); } } else { if (!other.scanRuns_.isEmpty()) { if (scanRunsBuilder_.isEmpty()) { scanRunsBuilder_.dispose(); scanRunsBuilder_ = null; scanRuns_ = other.scanRuns_; bitField0_ = (bitField0_ & ~0x00000001); scanRunsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getScanRunsFieldBuilder() : null; } else { scanRunsBuilder_.addAllMessages(other.scanRuns_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.websecurityscanner.v1beta.ScanRun m = input.readMessage( com.google.cloud.websecurityscanner.v1beta.ScanRun.parser(), extensionRegistry); if (scanRunsBuilder_ == null) { ensureScanRunsIsMutable(); scanRuns_.add(m); } else { scanRunsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.websecurityscanner.v1beta.ScanRun> scanRuns_ = java.util.Collections.emptyList(); private void ensureScanRunsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { scanRuns_ = new java.util.ArrayList<com.google.cloud.websecurityscanner.v1beta.ScanRun>(scanRuns_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.websecurityscanner.v1beta.ScanRun, com.google.cloud.websecurityscanner.v1beta.ScanRun.Builder, 
com.google.cloud.websecurityscanner.v1beta.ScanRunOrBuilder> scanRunsBuilder_; /** * * * <pre> * The list of ScanRuns returned. * </pre> * * <code>repeated .google.cloud.websecurityscanner.v1beta.ScanRun scan_runs = 1;</code> */ public java.util.List<com.google.cloud.websecurityscanner.v1beta.ScanRun> getScanRunsList() { if (scanRunsBuilder_ == null) { return java.util.Collections.unmodifiableList(scanRuns_); } else { return scanRunsBuilder_.getMessageList(); } } /** * * * <pre> * The list of ScanRuns returned. * </pre> * * <code>repeated .google.cloud.websecurityscanner.v1beta.ScanRun scan_runs = 1;</code> */ public int getScanRunsCount() { if (scanRunsBuilder_ == null) { return scanRuns_.size(); } else { return scanRunsBuilder_.getCount(); } } /** * * * <pre> * The list of ScanRuns returned. * </pre> * * <code>repeated .google.cloud.websecurityscanner.v1beta.ScanRun scan_runs = 1;</code> */ public com.google.cloud.websecurityscanner.v1beta.ScanRun getScanRuns(int index) { if (scanRunsBuilder_ == null) { return scanRuns_.get(index); } else { return scanRunsBuilder_.getMessage(index); } } /** * * * <pre> * The list of ScanRuns returned. * </pre> * * <code>repeated .google.cloud.websecurityscanner.v1beta.ScanRun scan_runs = 1;</code> */ public Builder setScanRuns( int index, com.google.cloud.websecurityscanner.v1beta.ScanRun value) { if (scanRunsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureScanRunsIsMutable(); scanRuns_.set(index, value); onChanged(); } else { scanRunsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The list of ScanRuns returned. 
* </pre> * * <code>repeated .google.cloud.websecurityscanner.v1beta.ScanRun scan_runs = 1;</code> */ public Builder setScanRuns( int index, com.google.cloud.websecurityscanner.v1beta.ScanRun.Builder builderForValue) { if (scanRunsBuilder_ == null) { ensureScanRunsIsMutable(); scanRuns_.set(index, builderForValue.build()); onChanged(); } else { scanRunsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of ScanRuns returned. * </pre> * * <code>repeated .google.cloud.websecurityscanner.v1beta.ScanRun scan_runs = 1;</code> */ public Builder addScanRuns(com.google.cloud.websecurityscanner.v1beta.ScanRun value) { if (scanRunsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureScanRunsIsMutable(); scanRuns_.add(value); onChanged(); } else { scanRunsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The list of ScanRuns returned. * </pre> * * <code>repeated .google.cloud.websecurityscanner.v1beta.ScanRun scan_runs = 1;</code> */ public Builder addScanRuns( int index, com.google.cloud.websecurityscanner.v1beta.ScanRun value) { if (scanRunsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureScanRunsIsMutable(); scanRuns_.add(index, value); onChanged(); } else { scanRunsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The list of ScanRuns returned. * </pre> * * <code>repeated .google.cloud.websecurityscanner.v1beta.ScanRun scan_runs = 1;</code> */ public Builder addScanRuns( com.google.cloud.websecurityscanner.v1beta.ScanRun.Builder builderForValue) { if (scanRunsBuilder_ == null) { ensureScanRunsIsMutable(); scanRuns_.add(builderForValue.build()); onChanged(); } else { scanRunsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The list of ScanRuns returned. 
* </pre> * * <code>repeated .google.cloud.websecurityscanner.v1beta.ScanRun scan_runs = 1;</code> */ public Builder addScanRuns( int index, com.google.cloud.websecurityscanner.v1beta.ScanRun.Builder builderForValue) { if (scanRunsBuilder_ == null) { ensureScanRunsIsMutable(); scanRuns_.add(index, builderForValue.build()); onChanged(); } else { scanRunsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of ScanRuns returned. * </pre> * * <code>repeated .google.cloud.websecurityscanner.v1beta.ScanRun scan_runs = 1;</code> */ public Builder addAllScanRuns( java.lang.Iterable<? extends com.google.cloud.websecurityscanner.v1beta.ScanRun> values) { if (scanRunsBuilder_ == null) { ensureScanRunsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, scanRuns_); onChanged(); } else { scanRunsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The list of ScanRuns returned. * </pre> * * <code>repeated .google.cloud.websecurityscanner.v1beta.ScanRun scan_runs = 1;</code> */ public Builder clearScanRuns() { if (scanRunsBuilder_ == null) { scanRuns_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { scanRunsBuilder_.clear(); } return this; } /** * * * <pre> * The list of ScanRuns returned. * </pre> * * <code>repeated .google.cloud.websecurityscanner.v1beta.ScanRun scan_runs = 1;</code> */ public Builder removeScanRuns(int index) { if (scanRunsBuilder_ == null) { ensureScanRunsIsMutable(); scanRuns_.remove(index); onChanged(); } else { scanRunsBuilder_.remove(index); } return this; } /** * * * <pre> * The list of ScanRuns returned. * </pre> * * <code>repeated .google.cloud.websecurityscanner.v1beta.ScanRun scan_runs = 1;</code> */ public com.google.cloud.websecurityscanner.v1beta.ScanRun.Builder getScanRunsBuilder( int index) { return getScanRunsFieldBuilder().getBuilder(index); } /** * * * <pre> * The list of ScanRuns returned. 
* </pre> * * <code>repeated .google.cloud.websecurityscanner.v1beta.ScanRun scan_runs = 1;</code> */ public com.google.cloud.websecurityscanner.v1beta.ScanRunOrBuilder getScanRunsOrBuilder( int index) { if (scanRunsBuilder_ == null) { return scanRuns_.get(index); } else { return scanRunsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The list of ScanRuns returned. * </pre> * * <code>repeated .google.cloud.websecurityscanner.v1beta.ScanRun scan_runs = 1;</code> */ public java.util.List<? extends com.google.cloud.websecurityscanner.v1beta.ScanRunOrBuilder> getScanRunsOrBuilderList() { if (scanRunsBuilder_ != null) { return scanRunsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(scanRuns_); } } /** * * * <pre> * The list of ScanRuns returned. * </pre> * * <code>repeated .google.cloud.websecurityscanner.v1beta.ScanRun scan_runs = 1;</code> */ public com.google.cloud.websecurityscanner.v1beta.ScanRun.Builder addScanRunsBuilder() { return getScanRunsFieldBuilder() .addBuilder(com.google.cloud.websecurityscanner.v1beta.ScanRun.getDefaultInstance()); } /** * * * <pre> * The list of ScanRuns returned. * </pre> * * <code>repeated .google.cloud.websecurityscanner.v1beta.ScanRun scan_runs = 1;</code> */ public com.google.cloud.websecurityscanner.v1beta.ScanRun.Builder addScanRunsBuilder( int index) { return getScanRunsFieldBuilder() .addBuilder( index, com.google.cloud.websecurityscanner.v1beta.ScanRun.getDefaultInstance()); } /** * * * <pre> * The list of ScanRuns returned. 
* </pre> * * <code>repeated .google.cloud.websecurityscanner.v1beta.ScanRun scan_runs = 1;</code> */ public java.util.List<com.google.cloud.websecurityscanner.v1beta.ScanRun.Builder> getScanRunsBuilderList() { return getScanRunsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.websecurityscanner.v1beta.ScanRun, com.google.cloud.websecurityscanner.v1beta.ScanRun.Builder, com.google.cloud.websecurityscanner.v1beta.ScanRunOrBuilder> getScanRunsFieldBuilder() { if (scanRunsBuilder_ == null) { scanRunsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.websecurityscanner.v1beta.ScanRun, com.google.cloud.websecurityscanner.v1beta.ScanRun.Builder, com.google.cloud.websecurityscanner.v1beta.ScanRunOrBuilder>( scanRuns_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); scanRuns_ = null; } return scanRunsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
 */
// NOTE(review): the file header (HEAD) declares
// google.cloud.support.v2.ListAttachmentsResponse, but everything from the
// @@protoc_insertion_point markers below onward references
// com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse. This looks
// like two generated files spliced together — confirm against the actual
// protoc output before relying on this section.
public com.google.protobuf.ByteString getNextPageTokenBytes() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof String) {
    // Lazily convert the cached String form to bytes and memoize the result,
    // standard protobuf-gencode behavior for string fields.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    nextPageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

/**
 * Sets {@code next_page_token} (field 2): the token for retrieving the next page of results, or
 * empty if there are no more results.
 *
 * @param value The nextPageToken to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setNextPageToken(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  nextPageToken_ = value;
  // Mark field 2 as explicitly set in the builder's presence bitmask.
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}

/**
 * Clears {@code next_page_token} back to its default (the empty string held by the default
 * instance) and drops its presence bit.
 *
 * @return This builder for chaining.
 */
public Builder clearNextPageToken() {
  nextPageToken_ = getDefaultInstance().getNextPageToken();
  bitField0_ = (bitField0_ & ~0x00000002);
  onChanged();
  return this;
}

/**
 * Sets {@code next_page_token} from raw bytes, validating that they are well-formed UTF-8.
 *
 * @param value The bytes for nextPageToken to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  nextPageToken_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}

// Unknown-field handling is delegated unchanged to the generated superclass.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}

// @@protoc_insertion_point(builder_scope:google.cloud.websecurityscanner.v1beta.ListScanRunsResponse)
}

// @@protoc_insertion_point(class_scope:google.cloud.websecurityscanner.v1beta.ListScanRunsResponse)
// Singleton default instance shared by all callers of getDefaultInstance().
private static final com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse
    DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse();
}

public static com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse
    getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Parser that merges wire data into a fresh Builder; on any failure it attaches
// the partially-built message to the thrown InvalidProtocolBufferException so
// callers can inspect what was decoded before the error.
private static final com.google.protobuf.Parser<ListScanRunsResponse> PARSER =
    new com.google.protobuf.AbstractParser<ListScanRunsResponse>() {
      @java.lang.Override
      public ListScanRunsResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<ListScanRunsResponse> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<ListScanRunsResponse> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse
    getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
googleapis/google-cloud-java
37,326
java-compute/google-cloud-compute/src/main/java/com/google/cloud/compute/v1/RegionSslCertificatesClient.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.compute.v1; import com.google.api.core.ApiFuture; import com.google.api.core.ApiFutures; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.longrunning.OperationFuture; import com.google.api.gax.paging.AbstractFixedSizeCollection; import com.google.api.gax.paging.AbstractPage; import com.google.api.gax.paging.AbstractPagedListResponse; import com.google.api.gax.rpc.OperationCallable; import com.google.api.gax.rpc.PageContext; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.compute.v1.stub.RegionSslCertificatesStub; import com.google.cloud.compute.v1.stub.RegionSslCertificatesStubSettings; import com.google.common.util.concurrent.MoreExecutors; import java.io.IOException; import java.util.List; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Service Description: The RegionSslCertificates API. * * <p>This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (RegionSslCertificatesClient regionSslCertificatesClient = * RegionSslCertificatesClient.create()) { * String project = "project-309310695"; * String region = "region-934795532"; * String sslCertificate = "sslCertificate-1304941589"; * SslCertificate response = regionSslCertificatesClient.get(project, region, sslCertificate); * } * }</pre> * * <p>Note: close() needs to be called on the RegionSslCertificatesClient object to clean up * resources such as threads. In the example above, try-with-resources is used, which automatically * calls close(). * * <table> * <caption>Methods</caption> * <tr> * <th>Method</th> * <th>Description</th> * <th>Method Variants</th> * </tr> * <tr> * <td><p> Delete</td> * <td><p> Deletes the specified SslCertificate resource in the region.</td> * <td> * <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p> * <ul> * <li><p> deleteAsync(DeleteRegionSslCertificateRequest request) * </ul> * <p>Methods that return long-running operations have "Async" method variants that return `OperationFuture`, which is used to track polling of the service.</p> * <ul> * <li><p> deleteAsync(String project, String region, String sslCertificate) * </ul> * <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p> * <ul> * <li><p> deleteOperationCallable() * <li><p> deleteCallable() * </ul> * </td> * </tr> * <tr> * <td><p> Get</td> * <td><p> Returns the specified SslCertificate resource in the specified region. 
Get a list of available SSL certificates by making a list() request.</td> * <td> * <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p> * <ul> * <li><p> get(GetRegionSslCertificateRequest request) * </ul> * <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p> * <ul> * <li><p> get(String project, String region, String sslCertificate) * </ul> * <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p> * <ul> * <li><p> getCallable() * </ul> * </td> * </tr> * <tr> * <td><p> Insert</td> * <td><p> Creates a SslCertificate resource in the specified project and region using the data included in the request</td> * <td> * <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p> * <ul> * <li><p> insertAsync(InsertRegionSslCertificateRequest request) * </ul> * <p>Methods that return long-running operations have "Async" method variants that return `OperationFuture`, which is used to track polling of the service.</p> * <ul> * <li><p> insertAsync(String project, String region, SslCertificate sslCertificateResource) * </ul> * <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p> * <ul> * <li><p> insertOperationCallable() * <li><p> insertCallable() * </ul> * </td> * </tr> * <tr> * <td><p> List</td> * <td><p> Retrieves the list of SslCertificate resources available to the specified project in the specified region.</td> * <td> * <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p> * <ul> * <li><p> list(ListRegionSslCertificatesRequest request) * </ul> * <p>"Flattened" method variants have 
converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p> * <ul> * <li><p> list(String project, String region) * </ul> * <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p> * <ul> * <li><p> listPagedCallable() * <li><p> listCallable() * </ul> * </td> * </tr> * </table> * * <p>See the individual methods for example code. * * <p>Many parameters require resource names to be formatted in a particular way. To assist with * these names, this class includes a format method for each type of name, and additionally a parse * method to extract the individual identifiers contained within names that are returned. * * <p>This class can be customized by passing in a custom instance of RegionSslCertificatesSettings * to create(). For example: * * <p>To customize credentials: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * RegionSslCertificatesSettings regionSslCertificatesSettings = * RegionSslCertificatesSettings.newBuilder() * .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials)) * .build(); * RegionSslCertificatesClient regionSslCertificatesClient = * RegionSslCertificatesClient.create(regionSslCertificatesSettings); * }</pre> * * <p>To customize the endpoint: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
 * // - It may require specifying regional endpoints when creating the service client as shown in
 * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
 * RegionSslCertificatesSettings regionSslCertificatesSettings =
 *     RegionSslCertificatesSettings.newBuilder().setEndpoint(myEndpoint).build();
 * RegionSslCertificatesClient regionSslCertificatesClient =
 *     RegionSslCertificatesClient.create(regionSslCertificatesSettings);
 * }</pre>
 *
 * <p>Please refer to the GitHub repository's samples for more quickstart code snippets.
 */
@Generated("by gapic-generator-java")
public class RegionSslCertificatesClient implements BackgroundResource {
  // May be null when the client was constructed directly from a stub
  // (see create(RegionSslCertificatesStub)).
  private final RegionSslCertificatesSettings settings;
  // All RPC methods below are thin delegations to this transport stub.
  private final RegionSslCertificatesStub stub;

  /**
   * Constructs an instance of RegionSslCertificatesClient with default settings.
   *
   * @throws IOException if the underlying transport cannot be created.
   */
  public static final RegionSslCertificatesClient create() throws IOException {
    return create(RegionSslCertificatesSettings.newBuilder().build());
  }

  /**
   * Constructs an instance of RegionSslCertificatesClient, using the given settings. The channels
   * are created based on the settings passed in, or defaults for any settings that are not set.
   */
  public static final RegionSslCertificatesClient create(RegionSslCertificatesSettings settings)
      throws IOException {
    return new RegionSslCertificatesClient(settings);
  }

  /**
   * Constructs an instance of RegionSslCertificatesClient, using the given stub for making calls.
   * This is for advanced usage - prefer using create(RegionSslCertificatesSettings).
   */
  public static final RegionSslCertificatesClient create(RegionSslCertificatesStub stub) {
    return new RegionSslCertificatesClient(stub);
  }

  /**
   * Constructs an instance of RegionSslCertificatesClient, using the given settings. This is
   * protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected RegionSslCertificatesClient(RegionSslCertificatesSettings settings) throws IOException {
    this.settings = settings;
    this.stub = ((RegionSslCertificatesStubSettings) settings.getStubSettings()).createStub();
  }

  /** Stub-only constructor; leaves {@code settings} null intentionally. */
  protected RegionSslCertificatesClient(RegionSslCertificatesStub stub) {
    this.settings = null;
    this.stub = stub;
  }

  /** Returns the settings this client was created with, or null if stub-constructed. */
  public final RegionSslCertificatesSettings getSettings() {
    return settings;
  }

  /** Returns the underlying transport stub (advanced usage). */
  public RegionSslCertificatesStub getStub() {
    return stub;
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Deletes the specified SslCertificate resource in the region. Flattened convenience overload
   * that builds the request from its parts.
   *
   * @param project Project ID for this request.
   * @param region Name of the region scoping this request.
   * @param sslCertificate Name of the SslCertificate resource to delete.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final OperationFuture<Operation, Operation> deleteAsync(
      String project, String region, String sslCertificate) {
    DeleteRegionSslCertificateRequest request =
        DeleteRegionSslCertificateRequest.newBuilder()
            .setProject(project)
            .setRegion(region)
            .setSslCertificate(sslCertificate)
            .build();
    return deleteAsync(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Deletes the specified SslCertificate resource in the region, returning a long-running
   * operation future.
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final OperationFuture<Operation, Operation> deleteAsync(
      DeleteRegionSslCertificateRequest request) {
    return deleteOperationCallable().futureCall(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Returns the callable for delete as a long-running operation (use {@code futureCall(request)}
   * to initiate the RPC and track polling).
   */
  public final OperationCallable<DeleteRegionSslCertificateRequest, Operation, Operation>
      deleteOperationCallable() {
    return stub.deleteOperationCallable();
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /** Returns the raw unary callable for delete (no operation polling). */
  public final UnaryCallable<DeleteRegionSslCertificateRequest, Operation> deleteCallable() {
    return stub.deleteCallable();
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Returns the specified SslCertificate resource in the specified region. Get a list of available
   * SSL certificates by making a list() request.
   *
   * @param project Project ID for this request.
   * @param region Name of the region scoping this request.
   * @param sslCertificate Name of the SslCertificate resource to return.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final SslCertificate get(String project, String region, String sslCertificate) {
    GetRegionSslCertificateRequest request =
        GetRegionSslCertificateRequest.newBuilder()
            .setProject(project)
            .setRegion(region)
            .setSslCertificate(sslCertificate)
            .build();
    return get(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Returns the specified SslCertificate resource in the specified region (request-object
   * variant; blocks until the RPC completes).
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final SslCertificate get(GetRegionSslCertificateRequest request) {
    return getCallable().call(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /** Returns the callable for get. */
  public final UnaryCallable<GetRegionSslCertificateRequest, SslCertificate> getCallable() {
    return stub.getCallable();
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Creates a SslCertificate resource in the specified project and region using the data included
   * in the request.
   *
   * @param project Project ID for this request.
   * @param region Name of the region scoping this request.
   * @param sslCertificateResource The body resource for this request
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final OperationFuture<Operation, Operation> insertAsync(
      String project, String region, SslCertificate sslCertificateResource) {
    InsertRegionSslCertificateRequest request =
        InsertRegionSslCertificateRequest.newBuilder()
            .setProject(project)
            .setRegion(region)
            .setSslCertificateResource(sslCertificateResource)
            .build();
    return insertAsync(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Creates a SslCertificate resource in the specified project and region (request-object
   * variant), returning a long-running operation future.
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final OperationFuture<Operation, Operation> insertAsync(
      InsertRegionSslCertificateRequest request) {
    return insertOperationCallable().futureCall(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /** Returns the callable for insert as a long-running operation. */
  public final OperationCallable<InsertRegionSslCertificateRequest, Operation, Operation>
      insertOperationCallable() {
    return stub.insertOperationCallable();
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /** Returns the raw unary callable for insert (no operation polling). */
  public final UnaryCallable<InsertRegionSslCertificateRequest, Operation> insertCallable() {
    return stub.insertCallable();
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Retrieves the list of SslCertificate resources available to the specified project in the
   * specified region.
   *
   * @param project Project ID for this request.
   * @param region Name of the region scoping this request.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final ListPagedResponse list(String project, String region) {
    ListRegionSslCertificatesRequest request =
        ListRegionSslCertificatesRequest.newBuilder().setProject(project).setRegion(region).build();
    return list(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Retrieves the list of SslCertificate resources (request-object variant). The returned
   * response supports {@code iterateAll()} for transparent pagination.
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final ListPagedResponse list(ListRegionSslCertificatesRequest request) {
    return listPagedCallable().call(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /** Returns the callable for list that wraps responses in {@link ListPagedResponse}. */
  public final UnaryCallable<ListRegionSslCertificatesRequest, ListPagedResponse>
      listPagedCallable() {
    return stub.listPagedCallable();
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Returns the raw per-page callable for list; callers must drive pagination themselves via
   * {@code getNextPageToken()}.
   */
  public final UnaryCallable<ListRegionSslCertificatesRequest, SslCertificateList> listCallable() {
    return stub.listCallable();
  }

  // Lifecycle methods below delegate to the stub (BackgroundResource contract).
  @Override
  public final void close() {
    stub.close();
  }

  @Override
  public void shutdown() {
    stub.shutdown();
  }

  @Override
  public boolean isShutdown() {
    return stub.isShutdown();
  }

  @Override
  public boolean isTerminated() {
    return stub.isTerminated();
  }

  @Override
  public void shutdownNow() {
    stub.shutdownNow();
  }

  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return stub.awaitTermination(duration, unit);
  }

  /** Paged response for list; iterates SslCertificate elements across pages. */
  public static class ListPagedResponse
      extends AbstractPagedListResponse<
          ListRegionSslCertificatesRequest,
          SslCertificateList,
          SslCertificate,
          ListPage,
          ListFixedSizeCollection> {

    /** Asynchronously builds a paged response from the future of the first page. */
    public static ApiFuture<ListPagedResponse> createAsync(
        PageContext<ListRegionSslCertificatesRequest, SslCertificateList, SslCertificate> context,
        ApiFuture<SslCertificateList> futureResponse) {
      ApiFuture<ListPage> futurePage =
          ListPage.createEmptyPage().createPageAsync(context, futureResponse);
      return ApiFutures.transform(
          futurePage, input -> new ListPagedResponse(input), MoreExecutors.directExecutor());
    }

    private ListPagedResponse(ListPage page) {
      super(page, ListFixedSizeCollection.createEmptyCollection());
    }
  }

  /** A single page of list results. */
  public static class ListPage
      extends AbstractPage<
          ListRegionSslCertificatesRequest, SslCertificateList, SslCertificate, ListPage> {

    private ListPage(
        PageContext<ListRegionSslCertificatesRequest, SslCertificateList, SslCertificate> context,
        SslCertificateList response) {
      super(context, response);
    }

    // Sentinel page used only as a factory for createPageAsync.
    private static ListPage createEmptyPage() {
      return new ListPage(null, null);
    }

    @Override
    protected ListPage createPage(
        PageContext<ListRegionSslCertificatesRequest, SslCertificateList, SslCertificate> context,
        SslCertificateList response) {
      return new ListPage(context, response);
    }

    @Override
    public ApiFuture<ListPage> createPageAsync(
        PageContext<ListRegionSslCertificatesRequest, SslCertificateList, SslCertificate> context,
        ApiFuture<SslCertificateList> futureResponse) {
      return super.createPageAsync(context, futureResponse);
    }
  }

  /** Fixed-size grouping of list results, used by the expandToFixedSizeCollection APIs. */
  public static class ListFixedSizeCollection
      extends AbstractFixedSizeCollection<
          ListRegionSslCertificatesRequest,
          SslCertificateList,
          SslCertificate,
          ListPage,
          ListFixedSizeCollection> {

    private ListFixedSizeCollection(List<ListPage> pages, int collectionSize) {
      super(pages, collectionSize);
    }

    private static ListFixedSizeCollection createEmptyCollection() {
      return new ListFixedSizeCollection(null, 0);
    }

    @Override
    protected ListFixedSizeCollection createCollection(List<ListPage> pages, int collectionSize) {
      return new ListFixedSizeCollection(pages, collectionSize);
    }
  }
}
googleapis/google-api-java-client-services
37,405
clients/google-api-services-retail/v2alpha/1.31.0/com/google/api/services/retail/v2alpha/model/GoogleCloudRetailV2alphaSearchRequest.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.retail.v2alpha.model; /** * Request message for SearchService.Search method. * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the Retail API. For a detailed explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class GoogleCloudRetailV2alphaSearchRequest extends com.google.api.client.json.GenericJson { /** * Boost specification to boost certain products. See more details at this [user * guide](https://cloud.google.com/retail/docs/boosting). Notice that if both * ServingConfig.boost_control_ids and SearchRequest.boost_spec are set, the boost conditions from * both places are evaluated. If a search request matches multiple boost conditions, the final * boost score is equal to the sum of the boost scores from all matched boost conditions. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private GoogleCloudRetailV2alphaSearchRequestBoostSpec boostSpec; /** * The branch resource name, such as * `projects/locations/global/catalogs/default_catalog/branches/0`. Use "default_branch" as the * branch ID or leave this field empty, to search products under the default branch. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String branch; /** * The default filter that is applied when a user performs a search without checking any filters * on the search page. The filter applied to every search request when quality improvement such as * query expansion is needed. For example, if a query does not have enough results, an expanded * query with SearchRequest.canonical_filter will be returned as a supplement of the original * query. This field is strongly recommended to achieve high search quality. See * SearchRequest.filter for more details about filter syntax. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String canonicalFilter; /** * Deprecated. Refer to https://cloud.google.com/retail/docs/configs#dynamic to enable dynamic * facets. Do not set this field. The specification for dynamically generated facets. Notice that * only textual facets can be dynamically generated. * The value may be {@code null}. */ @com.google.api.client.util.Key private GoogleCloudRetailV2alphaSearchRequestDynamicFacetSpec dynamicFacetSpec; /** * Facet specifications for faceted search. If empty, no facets are returned. A maximum of 100 * values are allowed. Otherwise, an INVALID_ARGUMENT error is returned. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<GoogleCloudRetailV2alphaSearchRequestFacetSpec> facetSpecs; /** * The filter syntax consists of an expression language for constructing a predicate from one or * more fields of the products being filtered. Filter expression is case-sensitive. 
See more * details at this [user guide](https://cloud.google.com/retail/docs/filter-and-order#filter). If * this field is unrecognizable, an INVALID_ARGUMENT is returned. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String filter; /** * The labels applied to a resource must meet the following requirements: * Each resource can have * multiple labels, up to a maximum of 64. * Each label must be a key-value pair. * Keys have a * minimum length of 1 character and a maximum length of 63 characters and cannot be empty. Values * can be empty and have a maximum length of 63 characters. * Keys and values can contain only * lowercase letters, numeric characters, underscores, and dashes. All characters must use UTF-8 * encoding, and international characters are allowed. * The key portion of a label must be * unique. However, you can use the same key with multiple resources. * Keys must start with a * lowercase letter or international character. See [Google Cloud * Document](https://cloud.google.com/resource-manager/docs/creating-managing-labels#requirements) * for more details. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.Map<String, java.lang.String> labels; /** * A 0-indexed integer that specifies the current offset (that is, starting result location, * amongst the Products deemed by the API as relevant) in search results. This field is only * considered if page_token is unset. If this field is negative, an INVALID_ARGUMENT is returned. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Integer offset; /** * The order in which products are returned. Products can be ordered by a field in an Product * object. Leave it unset if ordered by relevance. OrderBy expression is case-sensitive. See more * details at this [user guide](https://cloud.google.com/retail/docs/filter-and-order#order). If * this field is unrecognizable, an INVALID_ARGUMENT is returned. 
* The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String orderBy; /** * The categories associated with a category page. Required for category navigation queries to * achieve good search quality. The format should be the same as UserEvent.page_categories; To * represent full path of category, use '>' sign to separate different hierarchies. If '>' is part * of the category name, please replace it with other character(s). Category pages include special * pages such as sales or promotions. For instance, a special sale page may have the category * hierarchy: "pageCategories" : ["Sales > 2017 Black Friday Deals"]. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<java.lang.String> pageCategories; /** * Maximum number of Products to return. If unspecified, defaults to a reasonable value. The * maximum allowed value is 120. Values above 120 will be coerced to 120. If this field is * negative, an INVALID_ARGUMENT is returned. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Integer pageSize; /** * A page token SearchResponse.next_page_token, received from a previous SearchService.Search * call. Provide this to retrieve the subsequent page. When paginating, all other parameters * provided to SearchService.Search must match the call that provided the page token. Otherwise, * an INVALID_ARGUMENT error is returned. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String pageToken; /** * The specification for personalization. * The value may be {@code null}. */ @com.google.api.client.util.Key private GoogleCloudRetailV2alphaSearchRequestPersonalizationSpec personalizationSpec; /** * Raw search query. If this field is empty, the request is considered a category browsing request * and returned results are based on filter and page_categories. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.lang.String query; /** * The query expansion specification that specifies the conditions under which query expansion * will occur. See more details at this [user guide](https://cloud.google.com/retail/docs/result- * size#query_expansion). * The value may be {@code null}. */ @com.google.api.client.util.Key private GoogleCloudRetailV2alphaSearchRequestQueryExpansionSpec queryExpansionSpec; /** * The relevance threshold of the search results. Defaults to RelevanceThreshold.HIGH, which means * only the most relevant results are shown, and the least number of results are returned. See * more details at this [user guide](https://cloud.google.com/retail/docs/result- * size#relevance_thresholding). * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String relevanceThreshold; /** * The search mode of the search request. If not specified, a single search request triggers both * product search and faceted search. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String searchMode; /** * The spell correction specification that specifies the mode under which spell correction will * take effect. * The value may be {@code null}. */ @com.google.api.client.util.Key private GoogleCloudRetailV2alphaSearchRequestSpellCorrectionSpec spellCorrectionSpec; /** * User information. * The value may be {@code null}. */ @com.google.api.client.util.Key private GoogleCloudRetailV2alphaUserInfo userInfo; /** * The keys to fetch and rollup the matching variant Products attributes, FulfillmentInfo or * LocalInventorys attributes. The attributes from all the matching variant Products or * LocalInventorys are merged and de-duplicated. Notice that rollup attributes will lead to extra * query latency. Maximum number of keys is 30. For FulfillmentInfo, a fulfillment type and a * fulfillment ID must be provided in the format of "fulfillmentType.fulfillmentId". 
E.g., in * "pickupInStore.store123", "pickupInStore" is fulfillment type and "store123" is the store ID. * Supported keys are: * colorFamilies * price * originalPrice * discount * variantId * * inventory(place_id,price) * inventory(place_id,original_price) * * inventory(place_id,attributes.key), where key is any key in the * Product.local_inventories.attributes map. * attributes.key, where key is any key in the * Product.attributes map. * pickupInStore.id, where id is any FulfillmentInfo.place_ids for * FulfillmentInfo.type "pickup-in-store". * shipToStore.id, where id is any * FulfillmentInfo.place_ids for FulfillmentInfo.type "ship-to-store". * sameDayDelivery.id, where * id is any FulfillmentInfo.place_ids for FulfillmentInfo.type "same-day-delivery". * * nextDayDelivery.id, where id is any FulfillmentInfo.place_ids for FulfillmentInfo.type "next- * day-delivery". * customFulfillment1.id, where id is any FulfillmentInfo.place_ids for * FulfillmentInfo.type "custom-type-1". * customFulfillment2.id, where id is any * FulfillmentInfo.place_ids for FulfillmentInfo.type "custom-type-2". * customFulfillment3.id, * where id is any FulfillmentInfo.place_ids for FulfillmentInfo.type "custom-type-3". * * customFulfillment4.id, where id is any FulfillmentInfo.place_ids for FulfillmentInfo.type * "custom-type-4". * customFulfillment5.id, where id is any FulfillmentInfo.place_ids for * FulfillmentInfo.type "custom-type-5". If this field is set to an invalid value other than * these, an INVALID_ARGUMENT error is returned. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<java.lang.String> variantRollupKeys; /** * Required. A unique identifier for tracking visitors. For example, this could be implemented * with an HTTP cookie, which should be able to uniquely identify a visitor on a single device. * This unique identifier should not change if the visitor logs in or out of the website. 
This * should be the same identifier as UserEvent.visitor_id. The field must be a UTF-8 encoded string * with a length limit of 128 characters. Otherwise, an INVALID_ARGUMENT error is returned. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String visitorId; /** * Boost specification to boost certain products. See more details at this [user * guide](https://cloud.google.com/retail/docs/boosting). Notice that if both * ServingConfig.boost_control_ids and SearchRequest.boost_spec are set, the boost conditions from * both places are evaluated. If a search request matches multiple boost conditions, the final * boost score is equal to the sum of the boost scores from all matched boost conditions. * @return value or {@code null} for none */ public GoogleCloudRetailV2alphaSearchRequestBoostSpec getBoostSpec() { return boostSpec; } /** * Boost specification to boost certain products. See more details at this [user * guide](https://cloud.google.com/retail/docs/boosting). Notice that if both * ServingConfig.boost_control_ids and SearchRequest.boost_spec are set, the boost conditions from * both places are evaluated. If a search request matches multiple boost conditions, the final * boost score is equal to the sum of the boost scores from all matched boost conditions. * @param boostSpec boostSpec or {@code null} for none */ public GoogleCloudRetailV2alphaSearchRequest setBoostSpec(GoogleCloudRetailV2alphaSearchRequestBoostSpec boostSpec) { this.boostSpec = boostSpec; return this; } /** * The branch resource name, such as * `projects/locations/global/catalogs/default_catalog/branches/0`. Use "default_branch" as the * branch ID or leave this field empty, to search products under the default branch. * @return value or {@code null} for none */ public java.lang.String getBranch() { return branch; } /** * The branch resource name, such as * `projects/locations/global/catalogs/default_catalog/branches/0`. 
Use "default_branch" as the * branch ID or leave this field empty, to search products under the default branch. * @param branch branch or {@code null} for none */ public GoogleCloudRetailV2alphaSearchRequest setBranch(java.lang.String branch) { this.branch = branch; return this; } /** * The default filter that is applied when a user performs a search without checking any filters * on the search page. The filter applied to every search request when quality improvement such as * query expansion is needed. For example, if a query does not have enough results, an expanded * query with SearchRequest.canonical_filter will be returned as a supplement of the original * query. This field is strongly recommended to achieve high search quality. See * SearchRequest.filter for more details about filter syntax. * @return value or {@code null} for none */ public java.lang.String getCanonicalFilter() { return canonicalFilter; } /** * The default filter that is applied when a user performs a search without checking any filters * on the search page. The filter applied to every search request when quality improvement such as * query expansion is needed. For example, if a query does not have enough results, an expanded * query with SearchRequest.canonical_filter will be returned as a supplement of the original * query. This field is strongly recommended to achieve high search quality. See * SearchRequest.filter for more details about filter syntax. * @param canonicalFilter canonicalFilter or {@code null} for none */ public GoogleCloudRetailV2alphaSearchRequest setCanonicalFilter(java.lang.String canonicalFilter) { this.canonicalFilter = canonicalFilter; return this; } /** * Deprecated. Refer to https://cloud.google.com/retail/docs/configs#dynamic to enable dynamic * facets. Do not set this field. The specification for dynamically generated facets. Notice that * only textual facets can be dynamically generated. 
* @return value or {@code null} for none */ public GoogleCloudRetailV2alphaSearchRequestDynamicFacetSpec getDynamicFacetSpec() { return dynamicFacetSpec; } /** * Deprecated. Refer to https://cloud.google.com/retail/docs/configs#dynamic to enable dynamic * facets. Do not set this field. The specification for dynamically generated facets. Notice that * only textual facets can be dynamically generated. * @param dynamicFacetSpec dynamicFacetSpec or {@code null} for none */ public GoogleCloudRetailV2alphaSearchRequest setDynamicFacetSpec(GoogleCloudRetailV2alphaSearchRequestDynamicFacetSpec dynamicFacetSpec) { this.dynamicFacetSpec = dynamicFacetSpec; return this; } /** * Facet specifications for faceted search. If empty, no facets are returned. A maximum of 100 * values are allowed. Otherwise, an INVALID_ARGUMENT error is returned. * @return value or {@code null} for none */ public java.util.List<GoogleCloudRetailV2alphaSearchRequestFacetSpec> getFacetSpecs() { return facetSpecs; } /** * Facet specifications for faceted search. If empty, no facets are returned. A maximum of 100 * values are allowed. Otherwise, an INVALID_ARGUMENT error is returned. * @param facetSpecs facetSpecs or {@code null} for none */ public GoogleCloudRetailV2alphaSearchRequest setFacetSpecs(java.util.List<GoogleCloudRetailV2alphaSearchRequestFacetSpec> facetSpecs) { this.facetSpecs = facetSpecs; return this; } /** * The filter syntax consists of an expression language for constructing a predicate from one or * more fields of the products being filtered. Filter expression is case-sensitive. See more * details at this [user guide](https://cloud.google.com/retail/docs/filter-and-order#filter). If * this field is unrecognizable, an INVALID_ARGUMENT is returned. 
* @return value or {@code null} for none */ public java.lang.String getFilter() { return filter; } /** * The filter syntax consists of an expression language for constructing a predicate from one or * more fields of the products being filtered. Filter expression is case-sensitive. See more * details at this [user guide](https://cloud.google.com/retail/docs/filter-and-order#filter). If * this field is unrecognizable, an INVALID_ARGUMENT is returned. * @param filter filter or {@code null} for none */ public GoogleCloudRetailV2alphaSearchRequest setFilter(java.lang.String filter) { this.filter = filter; return this; } /** * The labels applied to a resource must meet the following requirements: * Each resource can have * multiple labels, up to a maximum of 64. * Each label must be a key-value pair. * Keys have a * minimum length of 1 character and a maximum length of 63 characters and cannot be empty. Values * can be empty and have a maximum length of 63 characters. * Keys and values can contain only * lowercase letters, numeric characters, underscores, and dashes. All characters must use UTF-8 * encoding, and international characters are allowed. * The key portion of a label must be * unique. However, you can use the same key with multiple resources. * Keys must start with a * lowercase letter or international character. See [Google Cloud * Document](https://cloud.google.com/resource-manager/docs/creating-managing-labels#requirements) * for more details. * @return value or {@code null} for none */ public java.util.Map<String, java.lang.String> getLabels() { return labels; } /** * The labels applied to a resource must meet the following requirements: * Each resource can have * multiple labels, up to a maximum of 64. * Each label must be a key-value pair. * Keys have a * minimum length of 1 character and a maximum length of 63 characters and cannot be empty. Values * can be empty and have a maximum length of 63 characters. 
* Keys and values can contain only * lowercase letters, numeric characters, underscores, and dashes. All characters must use UTF-8 * encoding, and international characters are allowed. * The key portion of a label must be * unique. However, you can use the same key with multiple resources. * Keys must start with a * lowercase letter or international character. See [Google Cloud * Document](https://cloud.google.com/resource-manager/docs/creating-managing-labels#requirements) * for more details. * @param labels labels or {@code null} for none */ public GoogleCloudRetailV2alphaSearchRequest setLabels(java.util.Map<String, java.lang.String> labels) { this.labels = labels; return this; } /** * A 0-indexed integer that specifies the current offset (that is, starting result location, * amongst the Products deemed by the API as relevant) in search results. This field is only * considered if page_token is unset. If this field is negative, an INVALID_ARGUMENT is returned. * @return value or {@code null} for none */ public java.lang.Integer getOffset() { return offset; } /** * A 0-indexed integer that specifies the current offset (that is, starting result location, * amongst the Products deemed by the API as relevant) in search results. This field is only * considered if page_token is unset. If this field is negative, an INVALID_ARGUMENT is returned. * @param offset offset or {@code null} for none */ public GoogleCloudRetailV2alphaSearchRequest setOffset(java.lang.Integer offset) { this.offset = offset; return this; } /** * The order in which products are returned. Products can be ordered by a field in an Product * object. Leave it unset if ordered by relevance. OrderBy expression is case-sensitive. See more * details at this [user guide](https://cloud.google.com/retail/docs/filter-and-order#order). If * this field is unrecognizable, an INVALID_ARGUMENT is returned. 
* @return value or {@code null} for none */ public java.lang.String getOrderBy() { return orderBy; } /** * The order in which products are returned. Products can be ordered by a field in an Product * object. Leave it unset if ordered by relevance. OrderBy expression is case-sensitive. See more * details at this [user guide](https://cloud.google.com/retail/docs/filter-and-order#order). If * this field is unrecognizable, an INVALID_ARGUMENT is returned. * @param orderBy orderBy or {@code null} for none */ public GoogleCloudRetailV2alphaSearchRequest setOrderBy(java.lang.String orderBy) { this.orderBy = orderBy; return this; } /** * The categories associated with a category page. Required for category navigation queries to * achieve good search quality. The format should be the same as UserEvent.page_categories; To * represent full path of category, use '>' sign to separate different hierarchies. If '>' is part * of the category name, please replace it with other character(s). Category pages include special * pages such as sales or promotions. For instance, a special sale page may have the category * hierarchy: "pageCategories" : ["Sales > 2017 Black Friday Deals"]. * @return value or {@code null} for none */ public java.util.List<java.lang.String> getPageCategories() { return pageCategories; } /** * The categories associated with a category page. Required for category navigation queries to * achieve good search quality. The format should be the same as UserEvent.page_categories; To * represent full path of category, use '>' sign to separate different hierarchies. If '>' is part * of the category name, please replace it with other character(s). Category pages include special * pages such as sales or promotions. For instance, a special sale page may have the category * hierarchy: "pageCategories" : ["Sales > 2017 Black Friday Deals"]. 
* @param pageCategories pageCategories or {@code null} for none */ public GoogleCloudRetailV2alphaSearchRequest setPageCategories(java.util.List<java.lang.String> pageCategories) { this.pageCategories = pageCategories; return this; } /** * Maximum number of Products to return. If unspecified, defaults to a reasonable value. The * maximum allowed value is 120. Values above 120 will be coerced to 120. If this field is * negative, an INVALID_ARGUMENT is returned. * @return value or {@code null} for none */ public java.lang.Integer getPageSize() { return pageSize; } /** * Maximum number of Products to return. If unspecified, defaults to a reasonable value. The * maximum allowed value is 120. Values above 120 will be coerced to 120. If this field is * negative, an INVALID_ARGUMENT is returned. * @param pageSize pageSize or {@code null} for none */ public GoogleCloudRetailV2alphaSearchRequest setPageSize(java.lang.Integer pageSize) { this.pageSize = pageSize; return this; } /** * A page token SearchResponse.next_page_token, received from a previous SearchService.Search * call. Provide this to retrieve the subsequent page. When paginating, all other parameters * provided to SearchService.Search must match the call that provided the page token. Otherwise, * an INVALID_ARGUMENT error is returned. * @return value or {@code null} for none */ public java.lang.String getPageToken() { return pageToken; } /** * A page token SearchResponse.next_page_token, received from a previous SearchService.Search * call. Provide this to retrieve the subsequent page. When paginating, all other parameters * provided to SearchService.Search must match the call that provided the page token. Otherwise, * an INVALID_ARGUMENT error is returned. * @param pageToken pageToken or {@code null} for none */ public GoogleCloudRetailV2alphaSearchRequest setPageToken(java.lang.String pageToken) { this.pageToken = pageToken; return this; } /** * The specification for personalization. 
* @return value or {@code null} for none */ public GoogleCloudRetailV2alphaSearchRequestPersonalizationSpec getPersonalizationSpec() { return personalizationSpec; } /** * The specification for personalization. * @param personalizationSpec personalizationSpec or {@code null} for none */ public GoogleCloudRetailV2alphaSearchRequest setPersonalizationSpec(GoogleCloudRetailV2alphaSearchRequestPersonalizationSpec personalizationSpec) { this.personalizationSpec = personalizationSpec; return this; } /** * Raw search query. If this field is empty, the request is considered a category browsing request * and returned results are based on filter and page_categories. * @return value or {@code null} for none */ public java.lang.String getQuery() { return query; } /** * Raw search query. If this field is empty, the request is considered a category browsing request * and returned results are based on filter and page_categories. * @param query query or {@code null} for none */ public GoogleCloudRetailV2alphaSearchRequest setQuery(java.lang.String query) { this.query = query; return this; } /** * The query expansion specification that specifies the conditions under which query expansion * will occur. See more details at this [user guide](https://cloud.google.com/retail/docs/result- * size#query_expansion). * @return value or {@code null} for none */ public GoogleCloudRetailV2alphaSearchRequestQueryExpansionSpec getQueryExpansionSpec() { return queryExpansionSpec; } /** * The query expansion specification that specifies the conditions under which query expansion * will occur. See more details at this [user guide](https://cloud.google.com/retail/docs/result- * size#query_expansion). 
* @param queryExpansionSpec queryExpansionSpec or {@code null} for none */ public GoogleCloudRetailV2alphaSearchRequest setQueryExpansionSpec(GoogleCloudRetailV2alphaSearchRequestQueryExpansionSpec queryExpansionSpec) { this.queryExpansionSpec = queryExpansionSpec; return this; } /** * The relevance threshold of the search results. Defaults to RelevanceThreshold.HIGH, which means * only the most relevant results are shown, and the least number of results are returned. See * more details at this [user guide](https://cloud.google.com/retail/docs/result- * size#relevance_thresholding). * @return value or {@code null} for none */ public java.lang.String getRelevanceThreshold() { return relevanceThreshold; } /** * The relevance threshold of the search results. Defaults to RelevanceThreshold.HIGH, which means * only the most relevant results are shown, and the least number of results are returned. See * more details at this [user guide](https://cloud.google.com/retail/docs/result- * size#relevance_thresholding). * @param relevanceThreshold relevanceThreshold or {@code null} for none */ public GoogleCloudRetailV2alphaSearchRequest setRelevanceThreshold(java.lang.String relevanceThreshold) { this.relevanceThreshold = relevanceThreshold; return this; } /** * The search mode of the search request. If not specified, a single search request triggers both * product search and faceted search. * @return value or {@code null} for none */ public java.lang.String getSearchMode() { return searchMode; } /** * The search mode of the search request. If not specified, a single search request triggers both * product search and faceted search. * @param searchMode searchMode or {@code null} for none */ public GoogleCloudRetailV2alphaSearchRequest setSearchMode(java.lang.String searchMode) { this.searchMode = searchMode; return this; } /** * The spell correction specification that specifies the mode under which spell correction will * take effect. 
* @return value or {@code null} for none */ public GoogleCloudRetailV2alphaSearchRequestSpellCorrectionSpec getSpellCorrectionSpec() { return spellCorrectionSpec; } /** * The spell correction specification that specifies the mode under which spell correction will * take effect. * @param spellCorrectionSpec spellCorrectionSpec or {@code null} for none */ public GoogleCloudRetailV2alphaSearchRequest setSpellCorrectionSpec(GoogleCloudRetailV2alphaSearchRequestSpellCorrectionSpec spellCorrectionSpec) { this.spellCorrectionSpec = spellCorrectionSpec; return this; } /** * User information. * @return value or {@code null} for none */ public GoogleCloudRetailV2alphaUserInfo getUserInfo() { return userInfo; } /** * User information. * @param userInfo userInfo or {@code null} for none */ public GoogleCloudRetailV2alphaSearchRequest setUserInfo(GoogleCloudRetailV2alphaUserInfo userInfo) { this.userInfo = userInfo; return this; } /** * The keys to fetch and rollup the matching variant Products attributes, FulfillmentInfo or * LocalInventorys attributes. The attributes from all the matching variant Products or * LocalInventorys are merged and de-duplicated. Notice that rollup attributes will lead to extra * query latency. Maximum number of keys is 30. For FulfillmentInfo, a fulfillment type and a * fulfillment ID must be provided in the format of "fulfillmentType.fulfillmentId". E.g., in * "pickupInStore.store123", "pickupInStore" is fulfillment type and "store123" is the store ID. * Supported keys are: * colorFamilies * price * originalPrice * discount * variantId * * inventory(place_id,price) * inventory(place_id,original_price) * * inventory(place_id,attributes.key), where key is any key in the * Product.local_inventories.attributes map. * attributes.key, where key is any key in the * Product.attributes map. * pickupInStore.id, where id is any FulfillmentInfo.place_ids for * FulfillmentInfo.type "pickup-in-store". 
* shipToStore.id, where id is any * FulfillmentInfo.place_ids for FulfillmentInfo.type "ship-to-store". * sameDayDelivery.id, where * id is any FulfillmentInfo.place_ids for FulfillmentInfo.type "same-day-delivery". * * nextDayDelivery.id, where id is any FulfillmentInfo.place_ids for FulfillmentInfo.type "next- * day-delivery". * customFulfillment1.id, where id is any FulfillmentInfo.place_ids for * FulfillmentInfo.type "custom-type-1". * customFulfillment2.id, where id is any * FulfillmentInfo.place_ids for FulfillmentInfo.type "custom-type-2". * customFulfillment3.id, * where id is any FulfillmentInfo.place_ids for FulfillmentInfo.type "custom-type-3". * * customFulfillment4.id, where id is any FulfillmentInfo.place_ids for FulfillmentInfo.type * "custom-type-4". * customFulfillment5.id, where id is any FulfillmentInfo.place_ids for * FulfillmentInfo.type "custom-type-5". If this field is set to an invalid value other than * these, an INVALID_ARGUMENT error is returned. * @return value or {@code null} for none */ public java.util.List<java.lang.String> getVariantRollupKeys() { return variantRollupKeys; } /** * The keys to fetch and rollup the matching variant Products attributes, FulfillmentInfo or * LocalInventorys attributes. The attributes from all the matching variant Products or * LocalInventorys are merged and de-duplicated. Notice that rollup attributes will lead to extra * query latency. Maximum number of keys is 30. For FulfillmentInfo, a fulfillment type and a * fulfillment ID must be provided in the format of "fulfillmentType.fulfillmentId". E.g., in * "pickupInStore.store123", "pickupInStore" is fulfillment type and "store123" is the store ID. * Supported keys are: * colorFamilies * price * originalPrice * discount * variantId * * inventory(place_id,price) * inventory(place_id,original_price) * * inventory(place_id,attributes.key), where key is any key in the * Product.local_inventories.attributes map. 
* attributes.key, where key is any key in the * Product.attributes map. * pickupInStore.id, where id is any FulfillmentInfo.place_ids for * FulfillmentInfo.type "pickup-in-store". * shipToStore.id, where id is any * FulfillmentInfo.place_ids for FulfillmentInfo.type "ship-to-store". * sameDayDelivery.id, where * id is any FulfillmentInfo.place_ids for FulfillmentInfo.type "same-day-delivery". * * nextDayDelivery.id, where id is any FulfillmentInfo.place_ids for FulfillmentInfo.type "next- * day-delivery". * customFulfillment1.id, where id is any FulfillmentInfo.place_ids for * FulfillmentInfo.type "custom-type-1". * customFulfillment2.id, where id is any * FulfillmentInfo.place_ids for FulfillmentInfo.type "custom-type-2". * customFulfillment3.id, * where id is any FulfillmentInfo.place_ids for FulfillmentInfo.type "custom-type-3". * * customFulfillment4.id, where id is any FulfillmentInfo.place_ids for FulfillmentInfo.type * "custom-type-4". * customFulfillment5.id, where id is any FulfillmentInfo.place_ids for * FulfillmentInfo.type "custom-type-5". If this field is set to an invalid value other than * these, an INVALID_ARGUMENT error is returned. * @param variantRollupKeys variantRollupKeys or {@code null} for none */ public GoogleCloudRetailV2alphaSearchRequest setVariantRollupKeys(java.util.List<java.lang.String> variantRollupKeys) { this.variantRollupKeys = variantRollupKeys; return this; } /** * Required. A unique identifier for tracking visitors. For example, this could be implemented * with an HTTP cookie, which should be able to uniquely identify a visitor on a single device. * This unique identifier should not change if the visitor logs in or out of the website. This * should be the same identifier as UserEvent.visitor_id. The field must be a UTF-8 encoded string * with a length limit of 128 characters. Otherwise, an INVALID_ARGUMENT error is returned. 
* @return value or {@code null} for none */ public java.lang.String getVisitorId() { return visitorId; } /** * Required. A unique identifier for tracking visitors. For example, this could be implemented * with an HTTP cookie, which should be able to uniquely identify a visitor on a single device. * This unique identifier should not change if the visitor logs in or out of the website. This * should be the same identifier as UserEvent.visitor_id. The field must be a UTF-8 encoded string * with a length limit of 128 characters. Otherwise, an INVALID_ARGUMENT error is returned. * @param visitorId visitorId or {@code null} for none */ public GoogleCloudRetailV2alphaSearchRequest setVisitorId(java.lang.String visitorId) { this.visitorId = visitorId; return this; } @Override public GoogleCloudRetailV2alphaSearchRequest set(String fieldName, Object value) { return (GoogleCloudRetailV2alphaSearchRequest) super.set(fieldName, value); } @Override public GoogleCloudRetailV2alphaSearchRequest clone() { return (GoogleCloudRetailV2alphaSearchRequest) super.clone(); } }
googleapis/google-cloud-java
36,951
java-api-gateway/proto-google-cloud-api-gateway-v1/src/main/java/com/google/cloud/apigateway/v1/ListApisRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/apigateway/v1/apigateway.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.apigateway.v1; /** * * * <pre> * Request message for ApiGatewayService.ListApis * </pre> * * Protobuf type {@code google.cloud.apigateway.v1.ListApisRequest} */ public final class ListApisRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.apigateway.v1.ListApisRequest) ListApisRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListApisRequest.newBuilder() to construct. 
private ListApisRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListApisRequest() { parent_ = ""; pageToken_ = ""; filter_ = ""; orderBy_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListApisRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.apigateway.v1.Apigateway .internal_static_google_cloud_apigateway_v1_ListApisRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.apigateway.v1.Apigateway .internal_static_google_cloud_apigateway_v1_ListApisRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.apigateway.v1.ListApisRequest.class, com.google.cloud.apigateway.v1.ListApisRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. Parent resource of the API, of the form: * `projects/&#42;&#47;locations/global` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. Parent resource of the API, of the form: * `projects/&#42;&#47;locations/global` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. 
*/ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 2; private int pageSize_ = 0; /** * * * <pre> * Page size. * </pre> * * <code>int32 page_size = 2;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * Page token. * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * Page token. * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. */ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FILTER_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object filter_ = ""; /** * * * <pre> * Filter. * </pre> * * <code>string filter = 4;</code> * * @return The filter. 
*/ @java.lang.Override public java.lang.String getFilter() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } } /** * * * <pre> * Filter. * </pre> * * <code>string filter = 4;</code> * * @return The bytes for filter. */ @java.lang.Override public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int ORDER_BY_FIELD_NUMBER = 5; @SuppressWarnings("serial") private volatile java.lang.Object orderBy_ = ""; /** * * * <pre> * Order by parameters. * </pre> * * <code>string order_by = 5;</code> * * @return The orderBy. */ @java.lang.Override public java.lang.String getOrderBy() { java.lang.Object ref = orderBy_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); orderBy_ = s; return s; } } /** * * * <pre> * Order by parameters. * </pre> * * <code>string order_by = 5;</code> * * @return The bytes for orderBy. 
*/ @java.lang.Override public com.google.protobuf.ByteString getOrderByBytes() { java.lang.Object ref = orderBy_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); orderBy_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (pageSize_ != 0) { output.writeInt32(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, orderBy_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { size += 
com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, orderBy_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.apigateway.v1.ListApisRequest)) { return super.equals(obj); } com.google.cloud.apigateway.v1.ListApisRequest other = (com.google.cloud.apigateway.v1.ListApisRequest) obj; if (!getParent().equals(other.getParent())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (!getFilter().equals(other.getFilter())) return false; if (!getOrderBy().equals(other.getOrderBy())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (37 * hash) + FILTER_FIELD_NUMBER; hash = (53 * hash) + getFilter().hashCode(); hash = (37 * hash) + ORDER_BY_FIELD_NUMBER; hash = (53 * hash) + getOrderBy().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.apigateway.v1.ListApisRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.apigateway.v1.ListApisRequest parseFrom( java.nio.ByteBuffer data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.apigateway.v1.ListApisRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.apigateway.v1.ListApisRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.apigateway.v1.ListApisRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.apigateway.v1.ListApisRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.apigateway.v1.ListApisRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.apigateway.v1.ListApisRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.apigateway.v1.ListApisRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.apigateway.v1.ListApisRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 
throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.apigateway.v1.ListApisRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.apigateway.v1.ListApisRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.apigateway.v1.ListApisRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for ApiGatewayService.ListApis * </pre> * * Protobuf type {@code google.cloud.apigateway.v1.ListApisRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.apigateway.v1.ListApisRequest) com.google.cloud.apigateway.v1.ListApisRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.apigateway.v1.Apigateway .internal_static_google_cloud_apigateway_v1_ListApisRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.apigateway.v1.Apigateway .internal_static_google_cloud_apigateway_v1_ListApisRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.apigateway.v1.ListApisRequest.class, com.google.cloud.apigateway.v1.ListApisRequest.Builder.class); } // Construct using com.google.cloud.apigateway.v1.ListApisRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; pageSize_ = 0; pageToken_ = ""; filter_ = ""; orderBy_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.apigateway.v1.Apigateway .internal_static_google_cloud_apigateway_v1_ListApisRequest_descriptor; } @java.lang.Override public com.google.cloud.apigateway.v1.ListApisRequest getDefaultInstanceForType() { return com.google.cloud.apigateway.v1.ListApisRequest.getDefaultInstance(); } 
@java.lang.Override public com.google.cloud.apigateway.v1.ListApisRequest build() { com.google.cloud.apigateway.v1.ListApisRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.apigateway.v1.ListApisRequest buildPartial() { com.google.cloud.apigateway.v1.ListApisRequest result = new com.google.cloud.apigateway.v1.ListApisRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.apigateway.v1.ListApisRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.pageSize_ = pageSize_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageToken_ = pageToken_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.filter_ = filter_; } if (((from_bitField0_ & 0x00000010) != 0)) { result.orderBy_ = orderBy_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) 
{ if (other instanceof com.google.cloud.apigateway.v1.ListApisRequest) { return mergeFrom((com.google.cloud.apigateway.v1.ListApisRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.apigateway.v1.ListApisRequest other) { if (other == com.google.cloud.apigateway.v1.ListApisRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000004; onChanged(); } if (!other.getFilter().isEmpty()) { filter_ = other.filter_; bitField0_ |= 0x00000008; onChanged(); } if (!other.getOrderBy().isEmpty()) { orderBy_ = other.orderBy_; bitField0_ |= 0x00000010; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { pageSize_ = input.readInt32(); bitField0_ |= 0x00000002; break; } // case 16 case 26: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 case 34: { filter_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 case 42: { orderBy_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000010; break; } // case 42 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // 
default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. Parent resource of the API, of the form: * `projects/&#42;&#47;locations/global` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Parent resource of the API, of the form: * `projects/&#42;&#47;locations/global` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. Parent resource of the API, of the form: * `projects/&#42;&#47;locations/global` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. 
Parent resource of the API, of the form: * `projects/&#42;&#47;locations/global` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. Parent resource of the API, of the form: * `projects/&#42;&#47;locations/global` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private int pageSize_; /** * * * <pre> * Page size. * </pre> * * <code>int32 page_size = 2;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * Page size. * </pre> * * <code>int32 page_size = 2;</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Page size. * </pre> * * <code>int32 page_size = 2;</code> * * @return This builder for chaining. */ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000002); pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * Page token. * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. 
     */
    // NOTE(review): generated protobuf code (protoc, "DO NOT EDIT") — tokens kept byte-identical,
    // only comments were added/condensed and the collapsed lines reformatted.
    // NOTE(review): these members belong to com.google.cloud.apigateway.v1.ListApisRequest, which
    // does not match the com.google.cloud.support.v2.ListAttachmentsResponse declared at the top of
    // this file — the file appears to be a concatenation of two generated sources; TODO confirm
    // against the upstream repository and regenerate.
    public java.lang.String getPageToken() {
      java.lang.Object ref = pageToken_;
      if (!(ref instanceof java.lang.String)) {
        // Lazily decoded: stored as ByteString until first String access, then cached as String.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * <pre>
     * Page token.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @return The bytes for pageToken.
     */
    public com.google.protobuf.ByteString getPageTokenBytes() {
      java.lang.Object ref = pageToken_;
      if (ref instanceof String) {
        // Cache the UTF-8 encoding so repeated byte access does not re-encode.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        pageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * <pre>
     * Page token.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @param value The pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     * <pre>
     * Page token.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageToken() {
      pageToken_ = getDefaultInstance().getPageToken();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }

    /**
     * <pre>
     * Page token.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @param value The bytes for pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    private java.lang.Object filter_ = "";

    /**
     * <pre>
     * Filter.
     * </pre>
     *
     * <code>string filter = 4;</code>
     *
     * @return The filter.
     */
    public java.lang.String getFilter() {
      java.lang.Object ref = filter_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        filter_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * <pre>
     * Filter.
     * </pre>
     *
     * <code>string filter = 4;</code>
     *
     * @return The bytes for filter.
     */
    public com.google.protobuf.ByteString getFilterBytes() {
      java.lang.Object ref = filter_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        filter_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * <pre>
     * Filter.
     * </pre>
     *
     * <code>string filter = 4;</code>
     *
     * @param value The filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilter(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      filter_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }

    /**
     * <pre>
     * Filter.
     * </pre>
     *
     * <code>string filter = 4;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearFilter() {
      filter_ = getDefaultInstance().getFilter();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }

    /**
     * <pre>
     * Filter.
     * </pre>
     *
     * <code>string filter = 4;</code>
     *
     * @param value The bytes for filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilterBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      filter_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }

    private java.lang.Object orderBy_ = "";

    /**
     * <pre>
     * Order by parameters.
     * </pre>
     *
     * <code>string order_by = 5;</code>
     *
     * @return The orderBy.
     */
    public java.lang.String getOrderBy() {
      java.lang.Object ref = orderBy_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        orderBy_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * <pre>
     * Order by parameters.
     * </pre>
     *
     * <code>string order_by = 5;</code>
     *
     * @return The bytes for orderBy.
     */
    public com.google.protobuf.ByteString getOrderByBytes() {
      java.lang.Object ref = orderBy_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        orderBy_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * <pre>
     * Order by parameters.
     * </pre>
     *
     * <code>string order_by = 5;</code>
     *
     * @param value The orderBy to set.
     * @return This builder for chaining.
     */
    public Builder setOrderBy(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      orderBy_ = value;
      bitField0_ |= 0x00000010;
      onChanged();
      return this;
    }

    /**
     * <pre>
     * Order by parameters.
     * </pre>
     *
     * <code>string order_by = 5;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearOrderBy() {
      orderBy_ = getDefaultInstance().getOrderBy();
      bitField0_ = (bitField0_ & ~0x00000010);
      onChanged();
      return this;
    }

    /**
     * <pre>
     * Order by parameters.
     * </pre>
     *
     * <code>string order_by = 5;</code>
     *
     * @param value The bytes for orderBy to set.
     * @return This builder for chaining.
     */
    public Builder setOrderByBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      orderBy_ = value;
      bitField0_ |= 0x00000010;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.apigateway.v1.ListApisRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.apigateway.v1.ListApisRequest)
  private static final com.google.cloud.apigateway.v1.ListApisRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.apigateway.v1.ListApisRequest();
  }

  public static com.google.cloud.apigateway.v1.ListApisRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<ListApisRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListApisRequest>() {
        @java.lang.Override
        public ListApisRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach the partially parsed message so callers can inspect what was read.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListApisRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListApisRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.apigateway.v1.ListApisRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ---------------------------------------------------------------------------------------------
// NOTE(review): the following three values are dataset/concatenation metadata, not Java source.
// They mark the boundary where a second file begins:
//   repo: apache/flink, size: 37,277 bytes,
//   path: flink-runtime/src/main/java/org/apache/flink/streaming/api/graph/StreamConfig.java
// Commented out so the file boundary is documented instead of leaving raw non-Java tokens here.
// ---------------------------------------------------------------------------------------------
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.streaming.api.graph; import org.apache.flink.annotation.Internal; import org.apache.flink.annotation.VisibleForTesting; import org.apache.flink.api.common.attribute.Attribute; import org.apache.flink.api.common.typeutils.TypeSerializer; import org.apache.flink.api.java.functions.KeySelector; import org.apache.flink.configuration.ConfigOption; import org.apache.flink.configuration.ConfigOptions; import org.apache.flink.configuration.Configuration; import org.apache.flink.core.memory.ManagedMemoryUseCase; import org.apache.flink.runtime.jobgraph.JobVertex; import org.apache.flink.runtime.jobgraph.OperatorID; import org.apache.flink.runtime.state.CheckpointStorage; import org.apache.flink.runtime.state.StateBackend; import org.apache.flink.runtime.util.config.memory.ManagedMemoryUtils; import org.apache.flink.streaming.api.operators.InternalTimeServiceManager; import org.apache.flink.streaming.api.operators.SimpleOperatorFactory; import org.apache.flink.streaming.api.operators.StreamOperator; import org.apache.flink.streaming.api.operators.StreamOperatorFactory; import org.apache.flink.streaming.runtime.tasks.StreamTaskException; import 
org.apache.flink.streaming.runtime.watermark.AbstractInternalWatermarkDeclaration; import org.apache.flink.util.ClassLoaderUtil; import org.apache.flink.util.InstantiationUtil; import org.apache.flink.util.OutputTag; import org.apache.flink.util.SerializedValue; import org.apache.flink.util.concurrent.FutureUtils; import javax.annotation.Nullable; import java.io.IOException; import java.io.Serializable; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.concurrent.Executor; import java.util.function.Function; import java.util.stream.Collectors; import static org.apache.flink.util.Preconditions.checkArgument; import static org.apache.flink.util.Preconditions.checkNotNull; import static org.apache.flink.util.Preconditions.checkState; /** * Internal configuration for a {@link StreamOperator}. This is created and populated by the {@link * StreamingJobGraphGenerator}. * * <p>NOTE TO IMPLEMENTERS: Please do not set public ConfigOption to this class. Use the job * Configuration instead! See {@link * org.apache.flink.configuration.CheckpointingOptions#ENABLE_UNALIGNED} for a reference * implementation. */ @Internal public class StreamConfig implements Serializable { private static final long serialVersionUID = 1L; // ------------------------------------------------------------------------ // Config Keys // ------------------------------------------------------------------------ public static final String SERIALIZED_UDF = "serializedUDF"; /** * Introduce serializedUdfClassName to avoid unnecessarily heavy {@link * #getStreamOperatorFactory}. 
*/ public static final String SERIALIZED_UDF_CLASS = "serializedUdfClass"; private static final ConfigOption<Integer> NUMBER_OF_OUTPUTS = ConfigOptions.key("numberOfOutputs").intType().defaultValue(0); private static final ConfigOption<Integer> NUMBER_OF_NETWORK_INPUTS = ConfigOptions.key("numberOfNetworkInputs").intType().defaultValue(0); private static final String CHAINED_OUTPUTS = "chainedOutputs"; private static final String CHAINED_TASK_CONFIG = "chainedTaskConfig_"; private static final ConfigOption<Boolean> IS_CHAINED_VERTEX = ConfigOptions.key("isChainedSubtask").booleanType().defaultValue(false); private static final ConfigOption<Integer> CHAIN_INDEX = ConfigOptions.key("chainIndex").intType().defaultValue(0); private static final ConfigOption<Integer> VERTEX_NAME = ConfigOptions.key("vertexID").intType().defaultValue(-1); private static final String ITERATION_ID = "iterationId"; private static final String INPUTS = "inputs"; private static final String TYPE_SERIALIZER_OUT_1 = "typeSerializer_out"; private static final String TYPE_SERIALIZER_SIDEOUT_PREFIX = "typeSerializer_sideout_"; private static final ConfigOption<Long> ITERATON_WAIT = ConfigOptions.key("iterationWait").longType().defaultValue(0L); private static final String OP_NONCHAINED_OUTPUTS = "opNonChainedOutputs"; private static final String VERTEX_NONCHAINED_OUTPUTS = "vertexNonChainedOutputs"; private static final String IN_STREAM_EDGES = "inStreamEdges"; private static final String OPERATOR_NAME = "operatorName"; private static final String OPERATOR_ID = "operatorID"; private static final ConfigOption<Boolean> CHAIN_END = ConfigOptions.key("chainEnd").booleanType().defaultValue(false); private static final ConfigOption<Boolean> GRAPH_CONTAINING_LOOPS = ConfigOptions.key("graphContainingLoops").booleanType().defaultValue(false); private static final String ADDITIONAL_METRIC_VARIABLES = "additionalmetricvariables"; private static final String CHECKPOINT_STORAGE = "checkpointstorage"; private 
static final String STATE_BACKEND = "statebackend"; private static final String TIMER_SERVICE_PROVIDER = "timerservice"; private static final String STATE_PARTITIONER = "statePartitioner"; private static final String STATE_KEY_SERIALIZER = "statekeyser"; private static final String MANAGED_MEMORY_FRACTION_PREFIX = "managedMemFraction."; private static final String ATTRIBUTE = "attribute"; private static final String WATERMARK_DECLARATIONS = "watermarkDeclarations"; /** * To reduce the deserialization overhead of reading the WatermarkDeclaration, we'll store the * result after the first deserialization, and return it directly in subsequent reading * requests. */ private Set<AbstractInternalWatermarkDeclaration<?>> deserializedWatermarkDeclarations; private static final ConfigOption<Boolean> STATE_BACKEND_USE_MANAGED_MEMORY = ConfigOptions.key("statebackend.useManagedMemory") .booleanType() .noDefaultValue() .withDescription( "If state backend is specified, whether it uses managed memory."); // ------------------------------------------------------------------------ // Default Values // ------------------------------------------------------------------------ private static final double DEFAULT_MANAGED_MEMORY_FRACTION = 0.0; // ------------------------------------------------------------------------ // Config // ------------------------------------------------------------------------ private final Configuration config; // To make the parallelization of the StreamConfig serialization easier, we use this map // to collect all the need-to-be-serialized objects. These objects will be serialized all at // once then. 
private final transient Map<String, Object> toBeSerializedConfigObjects = new HashMap<>(); private final transient Map<Integer, CompletableFuture<StreamConfig>> chainedTaskFutures = new HashMap<>(); private final transient CompletableFuture<StreamConfig> serializationFuture = new CompletableFuture<>(); /** * In order to release memory during processing data, some keys are removed in {@link * #clearInitialConfigs()}. Recording these keys here to prevent they are accessed after * removing. */ private final Set<String> removedKeys = new HashSet<>(); public StreamConfig(Configuration config) { this.config = config; } public Configuration getConfiguration() { return config; } public CompletableFuture<StreamConfig> getSerializationFuture() { return serializationFuture; } /** Trigger the object config serialization and return the completable future. */ public CompletableFuture<StreamConfig> triggerSerializationAndReturnFuture( Executor ioExecutor) { FutureUtils.combineAll(chainedTaskFutures.values()) .thenAcceptAsync( chainedConfigs -> { try { // Serialize all the objects to config. serializeAllConfigs(); InstantiationUtil.writeObjectToConfig( chainedConfigs.stream() .collect( Collectors.toMap( StreamConfig::getVertexID, Function.identity())), this.config, CHAINED_TASK_CONFIG); serializationFuture.complete(this); } catch (Throwable throwable) { serializationFuture.completeExceptionally(throwable); } }, ioExecutor); return serializationFuture; } /** * Serialize all object configs synchronously. Only used for operators which need to reconstruct * the StreamConfig internally or test. 
*/ public void serializeAllConfigs() { toBeSerializedConfigObjects.forEach( (key, object) -> { try { InstantiationUtil.writeObjectToConfig(object, this.config, key); } catch (IOException e) { throw new StreamTaskException( String.format("Could not serialize object for key %s.", key), e); } }); } @VisibleForTesting public void setAndSerializeTransitiveChainedTaskConfigs( Map<Integer, StreamConfig> chainedTaskConfigs) { try { InstantiationUtil.writeObjectToConfig( chainedTaskConfigs, this.config, CHAINED_TASK_CONFIG); } catch (IOException e) { throw new StreamTaskException( "Could not serialize object for key chained task config.", e); } } // ------------------------------------------------------------------------ // Configured Properties // ------------------------------------------------------------------------ public void setVertexID(Integer vertexID) { config.set(VERTEX_NAME, vertexID); } public Integer getVertexID() { return config.get(VERTEX_NAME); } /** Fraction of managed memory reserved for the given use case that this operator should use. */ public void setManagedMemoryFractionOperatorOfUseCase( ManagedMemoryUseCase managedMemoryUseCase, double fraction) { final ConfigOption<Double> configOption = getManagedMemoryFractionConfigOption(managedMemoryUseCase); checkArgument( fraction >= 0.0 && fraction <= 1.0, String.format( "%s should be in range [0.0, 1.0], but was: %s", configOption.key(), fraction)); config.set(configOption, fraction); } /** * Fraction of total managed memory in the slot that this operator should use for the given use * case. 
*/ public double getManagedMemoryFractionOperatorUseCaseOfSlot( ManagedMemoryUseCase managedMemoryUseCase, Configuration jobConfig, Configuration taskManagerConfig, ClassLoader cl) { return ManagedMemoryUtils.convertToFractionOfSlot( managedMemoryUseCase, config.get(getManagedMemoryFractionConfigOption(managedMemoryUseCase)), getAllManagedMemoryUseCases(), jobConfig, taskManagerConfig, config.getOptional(STATE_BACKEND_USE_MANAGED_MEMORY), cl); } private static ConfigOption<Double> getManagedMemoryFractionConfigOption( ManagedMemoryUseCase managedMemoryUseCase) { return ConfigOptions.key( MANAGED_MEMORY_FRACTION_PREFIX + checkNotNull(managedMemoryUseCase)) .doubleType() .defaultValue(DEFAULT_MANAGED_MEMORY_FRACTION); } private Set<ManagedMemoryUseCase> getAllManagedMemoryUseCases() { return config.keySet().stream() .filter((key) -> key.startsWith(MANAGED_MEMORY_FRACTION_PREFIX)) .map( (key) -> ManagedMemoryUseCase.valueOf( key.replaceFirst(MANAGED_MEMORY_FRACTION_PREFIX, ""))) .collect(Collectors.toSet()); } public void setTypeSerializerOut(TypeSerializer<?> serializer) { setTypeSerializer(TYPE_SERIALIZER_OUT_1, serializer); } public <T> TypeSerializer<T> getTypeSerializerOut(ClassLoader cl) { try { return InstantiationUtil.readObjectFromConfig(this.config, TYPE_SERIALIZER_OUT_1, cl); } catch (Exception e) { throw new StreamTaskException("Could not instantiate serializer.", e); } } public void setTypeSerializerSideOut(OutputTag<?> outputTag, TypeSerializer<?> serializer) { setTypeSerializer(TYPE_SERIALIZER_SIDEOUT_PREFIX + outputTag.getId(), serializer); } private void setTypeSerializer(String key, TypeSerializer<?> typeWrapper) { toBeSerializedConfigObjects.put(key, typeWrapper); } public <T> TypeSerializer<T> getTypeSerializerSideOut(OutputTag<?> outputTag, ClassLoader cl) { checkNotNull(outputTag, "Side output id must not be null."); try { return InstantiationUtil.readObjectFromConfig( this.config, TYPE_SERIALIZER_SIDEOUT_PREFIX + outputTag.getId(), cl); } catch 
(Exception e) { throw new StreamTaskException("Could not instantiate serializer.", e); } } public void setWatermarkDeclarations(byte[] serializedWatermarkDeclarations) { if (serializedWatermarkDeclarations != null) { config.setBytes(WATERMARK_DECLARATIONS, serializedWatermarkDeclarations); } } public Set<AbstractInternalWatermarkDeclaration<?>> getWatermarkDeclarations(ClassLoader cl) { if (deserializedWatermarkDeclarations != null) { return deserializedWatermarkDeclarations; } try { Set<AbstractInternalWatermarkDeclaration<?>> watermarkDeclarations = InstantiationUtil.readObjectFromConfig(this.config, WATERMARK_DECLARATIONS, cl); deserializedWatermarkDeclarations = watermarkDeclarations == null ? Collections.emptySet() : watermarkDeclarations; return deserializedWatermarkDeclarations; } catch (Exception e) { throw new StreamTaskException("Could not instantiate serializer.", e); } } public void setupNetworkInputs(TypeSerializer<?>... serializers) { InputConfig[] inputs = new InputConfig[serializers.length]; for (int i = 0; i < serializers.length; i++) { inputs[i] = new NetworkInputConfig(serializers[i], i, InputRequirement.PASS_THROUGH); } setInputs(inputs); } public void setInputs(InputConfig... 
inputs) { toBeSerializedConfigObjects.put(INPUTS, inputs); } public InputConfig[] getInputs(ClassLoader cl) { try { InputConfig[] inputs = InstantiationUtil.readObjectFromConfig(this.config, INPUTS, cl); if (inputs == null) { return new InputConfig[0]; } return inputs; } catch (Exception e) { throw new StreamTaskException("Could not deserialize inputs", e); } } @Deprecated public <T> TypeSerializer<T> getTypeSerializerIn1(ClassLoader cl) { return getTypeSerializerIn(0, cl); } @Deprecated public <T> TypeSerializer<T> getTypeSerializerIn2(ClassLoader cl) { return getTypeSerializerIn(1, cl); } public <T> TypeSerializer<T> getTypeSerializerIn(int index, ClassLoader cl) { InputConfig[] inputs = getInputs(cl); checkState(index < inputs.length); checkState( inputs[index] instanceof NetworkInputConfig, "Input [%s] was assumed to be network input", index); return (TypeSerializer<T>) ((NetworkInputConfig) inputs[index]).typeSerializer; } @VisibleForTesting public void setStreamOperator(StreamOperator<?> operator) { setStreamOperatorFactory(SimpleOperatorFactory.of(operator)); } public void setStreamOperatorFactory(StreamOperatorFactory<?> factory) { if (factory != null) { toBeSerializedConfigObjects.put(SERIALIZED_UDF, factory); toBeSerializedConfigObjects.put(SERIALIZED_UDF_CLASS, factory.getClass()); } } @VisibleForTesting public <T extends StreamOperator<?>> T getStreamOperator(ClassLoader cl) { SimpleOperatorFactory<?> factory = getStreamOperatorFactory(cl); return (T) factory.getOperator(); } public <T extends StreamOperatorFactory<?>> T getStreamOperatorFactory(ClassLoader cl) { try { checkState( !removedKeys.contains(SERIALIZED_UDF), String.format("%s has been removed.", SERIALIZED_UDF)); return InstantiationUtil.readObjectFromConfig(this.config, SERIALIZED_UDF, cl); } catch (ClassNotFoundException e) { String classLoaderInfo = ClassLoaderUtil.getUserCodeClassLoaderInfo(cl); boolean loadableDoubleCheck = ClassLoaderUtil.validateClassLoadable(e, cl); String 
exceptionMessage = "Cannot load user class: " + e.getMessage() + "\nClassLoader info: " + classLoaderInfo + (loadableDoubleCheck ? "\nClass was actually found in classloader - deserialization issue." : "\nClass not resolvable through given classloader."); throw new StreamTaskException(exceptionMessage, e); } catch (Exception e) { throw new StreamTaskException("Cannot instantiate user function.", e); } } public <T extends StreamOperatorFactory<?>> Class<T> getStreamOperatorFactoryClass( ClassLoader cl) { try { return InstantiationUtil.readObjectFromConfig(this.config, SERIALIZED_UDF_CLASS, cl); } catch (Exception e) { throw new StreamTaskException("Could not instantiate serialized udf class.", e); } } public void setIterationId(String iterationId) { config.setString(ITERATION_ID, iterationId); } public String getIterationId() { return config.getString(ITERATION_ID, ""); } public void setIterationWaitTime(long time) { config.set(ITERATON_WAIT, time); } public long getIterationWaitTime() { return config.get(ITERATON_WAIT); } public void setNumberOfNetworkInputs(int numberOfInputs) { config.set(NUMBER_OF_NETWORK_INPUTS, numberOfInputs); } public int getNumberOfNetworkInputs() { return config.get(NUMBER_OF_NETWORK_INPUTS); } public void setNumberOfOutputs(int numberOfOutputs) { config.set(NUMBER_OF_OUTPUTS, numberOfOutputs); } public int getNumberOfOutputs() { return config.get(NUMBER_OF_OUTPUTS); } /** Sets the operator level non-chained outputs. */ public void setOperatorNonChainedOutputs(List<NonChainedOutput> nonChainedOutputs) { toBeSerializedConfigObjects.put(OP_NONCHAINED_OUTPUTS, nonChainedOutputs); } public List<NonChainedOutput> getOperatorNonChainedOutputs(ClassLoader cl) { try { List<NonChainedOutput> nonChainedOutputs = InstantiationUtil.readObjectFromConfig(this.config, OP_NONCHAINED_OUTPUTS, cl); return nonChainedOutputs == null ? 
new ArrayList<>() : nonChainedOutputs; } catch (Exception e) { throw new StreamTaskException("Could not instantiate non chained outputs.", e); } } public void setChainedOutputs(List<StreamEdge> chainedOutputs) { toBeSerializedConfigObjects.put(CHAINED_OUTPUTS, chainedOutputs); } public List<StreamEdge> getChainedOutputs(ClassLoader cl) { try { List<StreamEdge> chainedOutputs = InstantiationUtil.readObjectFromConfig(this.config, CHAINED_OUTPUTS, cl); return chainedOutputs == null ? new ArrayList<StreamEdge>() : chainedOutputs; } catch (Exception e) { throw new StreamTaskException("Could not instantiate chained outputs.", e); } } public void setInPhysicalEdges(List<StreamEdge> inEdges) { toBeSerializedConfigObjects.put(IN_STREAM_EDGES, inEdges); } public List<StreamEdge> getInPhysicalEdges(ClassLoader cl) { try { List<StreamEdge> inEdges = InstantiationUtil.readObjectFromConfig(this.config, IN_STREAM_EDGES, cl); return inEdges == null ? new ArrayList<StreamEdge>() : inEdges; } catch (Exception e) { throw new StreamTaskException("Could not instantiate inputs.", e); } } /** * Sets the job vertex level non-chained outputs. The given output list must have the same order * with {@link JobVertex#getProducedDataSets()}. */ public void setVertexNonChainedOutputs(List<NonChainedOutput> nonChainedOutputs) { toBeSerializedConfigObjects.put(VERTEX_NONCHAINED_OUTPUTS, nonChainedOutputs); } public List<NonChainedOutput> getVertexNonChainedOutputs(ClassLoader cl) { try { List<NonChainedOutput> nonChainedOutputs = InstantiationUtil.readObjectFromConfig( this.config, VERTEX_NONCHAINED_OUTPUTS, cl); return nonChainedOutputs == null ? 
new ArrayList<>() : nonChainedOutputs; } catch (Exception e) { throw new StreamTaskException("Could not instantiate outputs in order.", e); } } public void setTransitiveChainedTaskConfigs(Map<Integer, StreamConfig> chainedTaskConfigs) { if (chainedTaskConfigs != null) { chainedTaskConfigs.forEach( (id, config) -> chainedTaskFutures.put(id, config.getSerializationFuture())); } } public Map<Integer, StreamConfig> getTransitiveChainedTaskConfigs(ClassLoader cl) { try { checkState( !removedKeys.contains(CHAINED_TASK_CONFIG), String.format("%s has been removed.", CHAINED_TASK_CONFIG)); Map<Integer, StreamConfig> confs = InstantiationUtil.readObjectFromConfig(this.config, CHAINED_TASK_CONFIG, cl); return confs == null ? new HashMap<Integer, StreamConfig>() : confs; } catch (Exception e) { throw new StreamTaskException("Could not instantiate configuration.", e); } } public Map<Integer, StreamConfig> getTransitiveChainedTaskConfigsWithSelf(ClassLoader cl) { // TODO: could this logic be moved to the user of #setTransitiveChainedTaskConfigs() ? 
Map<Integer, StreamConfig> chainedTaskConfigs = getTransitiveChainedTaskConfigs(cl); chainedTaskConfigs.put(getVertexID(), this); return chainedTaskConfigs; } public void setOperatorID(OperatorID operatorID) { this.config.setBytes(OPERATOR_ID, operatorID.getBytes()); } public OperatorID getOperatorID() { byte[] operatorIDBytes = config.getBytes(OPERATOR_ID, null); return new OperatorID(checkNotNull(operatorIDBytes)); } public void setOperatorName(String name) { this.config.setString(OPERATOR_NAME, name); } public String getOperatorName() { return this.config.getString(OPERATOR_NAME, null); } public void setChainIndex(int index) { this.config.set(CHAIN_INDEX, index); } public int getChainIndex() { return this.config.get(CHAIN_INDEX); } // ------------------------------------------------------------------------ // State backend // ------------------------------------------------------------------------ public void setStateBackend(StateBackend backend) { if (backend != null) { toBeSerializedConfigObjects.put(STATE_BACKEND, backend); setStateBackendUsesManagedMemory(backend.useManagedMemory()); } } @VisibleForTesting public void setStateBackendUsesManagedMemory(boolean usesManagedMemory) { this.config.set(STATE_BACKEND_USE_MANAGED_MEMORY, usesManagedMemory); } public void setSerializedStateBackend( SerializedValue<StateBackend> serializedStateBackend, boolean useManagedMemory) { if (serializedStateBackend != null) { this.config.setBytes(STATE_BACKEND, serializedStateBackend.getByteArray()); setStateBackendUsesManagedMemory(useManagedMemory); } } public void setSerializedCheckpointStorage( SerializedValue<CheckpointStorage> serializedCheckpointStorage) { if (serializedCheckpointStorage != null) { this.config.setBytes(CHECKPOINT_STORAGE, serializedCheckpointStorage.getByteArray()); } } public StateBackend getStateBackend(ClassLoader cl) { try { return InstantiationUtil.readObjectFromConfig(this.config, STATE_BACKEND, cl); } catch (Exception e) { throw new 
StreamTaskException("Could not instantiate statehandle provider.", e); } } public void setAdditionalMetricVariables( @Nullable Map<String, String> additionalMetricVariables) { if (additionalMetricVariables != null) { toBeSerializedConfigObjects.put(ADDITIONAL_METRIC_VARIABLES, additionalMetricVariables); } } public Map<String, String> getAdditionalMetricVariables() { try { Map<String, String> additionalMetricVariables = InstantiationUtil.readObjectFromConfig( this.config, ADDITIONAL_METRIC_VARIABLES, this.getClass().getClassLoader()); return additionalMetricVariables == null ? Collections.emptyMap() : additionalMetricVariables; } catch (Exception e) { throw new StreamTaskException("Could not instantiate additional metric variables.", e); } } @VisibleForTesting public void setCheckpointStorage(CheckpointStorage storage) { if (storage != null) { toBeSerializedConfigObjects.put(CHECKPOINT_STORAGE, storage); } } public CheckpointStorage getCheckpointStorage(ClassLoader cl) { try { return InstantiationUtil.readObjectFromConfig(this.config, CHECKPOINT_STORAGE, cl); } catch (Exception e) { throw new StreamTaskException("Could not instantiate checkpoint storage.", e); } } public void setTimerServiceProvider(InternalTimeServiceManager.Provider timerServiceProvider) { if (timerServiceProvider != null) { toBeSerializedConfigObjects.put(TIMER_SERVICE_PROVIDER, timerServiceProvider); } } public InternalTimeServiceManager.Provider getTimerServiceProvider(ClassLoader cl) { try { return InstantiationUtil.readObjectFromConfig(this.config, TIMER_SERVICE_PROVIDER, cl); } catch (Exception e) { throw new StreamTaskException("Could not instantiate timer service provider.", e); } } public void setStatePartitioner(int input, KeySelector<?, ?> partitioner) { toBeSerializedConfigObjects.put(STATE_PARTITIONER + input, partitioner); } public <IN, K extends Serializable> KeySelector<IN, K> getStatePartitioner( int input, ClassLoader cl) { try { return InstantiationUtil.readObjectFromConfig( 
this.config, STATE_PARTITIONER + input, cl); } catch (Exception e) { throw new StreamTaskException("Could not instantiate state partitioner.", e); } } public void setStateKeySerializer(TypeSerializer<?> serializer) { toBeSerializedConfigObjects.put(STATE_KEY_SERIALIZER, serializer); } public <K> TypeSerializer<K> getStateKeySerializer(ClassLoader cl) { try { return InstantiationUtil.readObjectFromConfig(this.config, STATE_KEY_SERIALIZER, cl); } catch (Exception e) { throw new StreamTaskException( "Could not instantiate state key serializer from task config.", e); } } // ------------------------------------------------------------------------ // Miscellaneous // ------------------------------------------------------------------------ public void setChainStart() { config.set(IS_CHAINED_VERTEX, true); } public boolean isChainStart() { return config.get(IS_CHAINED_VERTEX); } public void setChainEnd() { config.set(CHAIN_END, true); } public boolean isChainEnd() { return config.get(CHAIN_END); } @Override public String toString() { ClassLoader cl = getClass().getClassLoader(); StringBuilder builder = new StringBuilder(); builder.append("\n======================="); builder.append("Stream Config"); builder.append("======================="); builder.append("\nNumber of non-chained inputs: ").append(getNumberOfNetworkInputs()); builder.append("\nNumber of non-chained outputs: ").append(getNumberOfOutputs()); builder.append("\nOutput names: ").append(getOperatorNonChainedOutputs(cl)); builder.append("\nPartitioning:"); for (NonChainedOutput output : getOperatorNonChainedOutputs(cl)) { String outputName = output.getDataSetId().toString(); builder.append("\n\t").append(outputName).append(": ").append(output.getPartitioner()); } builder.append("\nChained subtasks: ").append(getChainedOutputs(cl)); try { builder.append("\nOperator: ") .append(getStreamOperatorFactoryClass(cl).getSimpleName()); } catch (Exception e) { builder.append("\nOperator: Missing"); } if (isChainStart() && 
getChainedOutputs(cl).size() > 0) { builder.append( "\n\n\n---------------------\nChained task configs\n---------------------\n"); builder.append(getTransitiveChainedTaskConfigs(cl)); } return builder.toString(); } public void setGraphContainingLoops(boolean graphContainingLoops) { config.set(GRAPH_CONTAINING_LOOPS, graphContainingLoops); } public boolean isGraphContainingLoops() { return config.get(GRAPH_CONTAINING_LOOPS); } public void setAttribute(Attribute attribute) { if (attribute != null) { toBeSerializedConfigObjects.put(ATTRIBUTE, attribute); } } public Attribute getAttribute(ClassLoader cl) { try { return InstantiationUtil.readObjectFromConfig(this.config, ATTRIBUTE, cl); } catch (Exception e) { throw new StreamTaskException("Could not instantiate checkpoint storage.", e); } } /** * In general, we don't clear any configuration. However, the {@link #SERIALIZED_UDF} may be * very large when operator includes some large objects, the SERIALIZED_UDF is used to create a * StreamOperator and usually only needs to be called once. {@link #CHAINED_TASK_CONFIG} may be * large as well due to the StreamConfig of all non-head operators in OperatorChain will be * serialized and stored in CHAINED_TASK_CONFIG. They can be cleared to reduce the memory after * StreamTask is initialized. If so, TM will have more memory during running. See FLINK-33315 * and FLINK-33317 for more information. */ public void clearInitialConfigs() { removedKeys.add(SERIALIZED_UDF); config.removeKey(SERIALIZED_UDF); removedKeys.add(CHAINED_TASK_CONFIG); config.removeKey(CHAINED_TASK_CONFIG); } /** * Requirements of the different inputs of an operator. Each input can have a different * requirement. For all {@link #SORTED} inputs, records are sorted/grouped by key and all * records of a given key are passed to the operator consecutively before moving on to the next * group. 
*/
    public enum InputRequirement {
        /**
         * Records from all sorted inputs are grouped (sorted) by key and are then fed to the
         * operator one group at a time. This "zig-zags" between different inputs if records for the
         * same key arrive on multiple inputs to ensure that the operator sees all records with a
         * key as one consecutive group.
         */
        SORTED,

        /**
         * Records from {@link #PASS_THROUGH} inputs are passed to the operator before passing any
         * records from {@link #SORTED} inputs. There are no guarantees on ordering between and
         * within the different {@link #PASS_THROUGH} inputs.
         */
        PASS_THROUGH;
    }

    /** Interface representing chained inputs. */
    public interface InputConfig extends Serializable {}

    /** A representation of a Network {@link InputConfig}. */
    public static class NetworkInputConfig implements InputConfig {
        // Serializer used to deserialize records arriving on this network input.
        private final TypeSerializer<?> typeSerializer;
        // Whether this input must be sorted before records reach the operator.
        private final InputRequirement inputRequirement;
        // Index of the input gate this input reads from.
        private int inputGateIndex;

        public NetworkInputConfig(TypeSerializer<?> typeSerializer, int inputGateIndex) {
            // Plain network inputs default to pass-through (no sorting requirement).
            this(typeSerializer, inputGateIndex, InputRequirement.PASS_THROUGH);
        }

        public NetworkInputConfig(
                TypeSerializer<?> typeSerializer,
                int inputGateIndex,
                InputRequirement inputRequirement) {
            this.typeSerializer = typeSerializer;
            this.inputGateIndex = inputGateIndex;
            this.inputRequirement = inputRequirement;
        }

        public TypeSerializer<?> getTypeSerializer() {
            return typeSerializer;
        }

        public int getInputGateIndex() {
            return inputGateIndex;
        }

        public InputRequirement getInputRequirement() {
            return inputRequirement;
        }
    }

    /** A serialized representation of an input. */
    public static class SourceInputConfig implements InputConfig {
        private final StreamEdge inputEdge;

        public SourceInputConfig(StreamEdge inputEdge) {
            this.inputEdge = inputEdge;
        }

        public StreamEdge getInputEdge() {
            return inputEdge;
        }

        @Override
        public String toString() {
            return inputEdge.toString();
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == this) {
                return true;
            }
            if (!(obj instanceof SourceInputConfig)) {
                return false;
            }
            SourceInputConfig other = (SourceInputConfig) obj;
            // Equality (and hashCode below) is defined solely by the wrapped input edge.
            return Objects.equals(other.inputEdge, inputEdge);
        }

        @Override
        public int hashCode() {
            return inputEdge.hashCode();
        }
    }

    /** Returns true iff the given input config is a network input that requires sorting. */
    public static boolean requiresSorting(InputConfig inputConfig) {
        return inputConfig instanceof NetworkInputConfig
                && ((NetworkInputConfig) inputConfig).getInputRequirement()
                        == InputRequirement.SORTED;
    }
}
googleapis/google-api-java-client-services
37,479
clients/google-api-services-speech/v1p1beta1/1.31.0/com/google/api/services/speech/v1p1beta1/model/RecognitionConfig.java
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
/*
 * This code was generated by https://github.com/googleapis/google-api-java-client-services/
 * Modify at your own risk.
 */
// NOTE(review): generated data-model class — do not hand-edit logic; changes will be lost on
// regeneration.

package com.google.api.services.speech.v1p1beta1.model;

/**
 * Provides information to the recognizer that specifies how to process the request.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Cloud Speech-to-Text API. For a detailed explanation
 * see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class RecognitionConfig extends com.google.api.client.json.GenericJson {

  /**
   * Speech adaptation configuration improves the accuracy of speech recognition. For more
   * information, see the [speech
   * adaptation](https://cloud.google.com/speech-to-text/docs/adaptation) documentation. When
   * speech adaptation is set it supersedes the `speech_contexts` field.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private SpeechAdaptation adaptation;

  /**
   * A list of up to 3 additional [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language
   * tags, listing possible alternative languages of the supplied audio.
See [Language * Support](https://cloud.google.com/speech-to-text/docs/languages) for a list of the currently * supported language codes. If alternative languages are listed, recognition result will contain * recognition in the most likely language detected including the main language_code. The * recognition result will include the language tag of the language detected in the audio. Note: * This feature is only supported for Voice Command and Voice Search use cases and performance may * vary for other use cases (e.g., phone call transcription). * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<java.lang.String> alternativeLanguageCodes; /** * The number of channels in the input audio data. ONLY set this for MULTI-CHANNEL recognition. * Valid values for LINEAR16 and FLAC are `1`-`8`. Valid values for OGG_OPUS are '1'-'254'. Valid * value for MULAW, AMR, AMR_WB and SPEEX_WITH_HEADER_BYTE is only `1`. If `0` or omitted, * defaults to one channel (mono). Note: We only recognize the first channel by default. To * perform independent recognition on each channel set `enable_separate_recognition_per_channel` * to 'true'. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Integer audioChannelCount; /** * Config to enable speaker diarization and set additional parameters to make diarization better * suited for your application. Note: When this is enabled, we send all the words from the * beginning of the audio for the top alternative in every consecutive STREAMING responses. This * is done in order to improve our speaker tags as our models learn to identify the speakers in * the conversation over time. For non-streaming requests, the diarization results will be * provided only in the top alternative of the FINAL SpeechRecognitionResult. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private SpeakerDiarizationConfig diarizationConfig; /** * If set, specifies the estimated number of speakers in the conversation. Defaults to '2'. * Ignored unless enable_speaker_diarization is set to true. Note: Use diarization_config instead. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Integer diarizationSpeakerCount; /** * If 'true', adds punctuation to recognition result hypotheses. This feature is only available in * select languages. Setting this for requests in other languages has no effect at all. The * default 'false' value does not add punctuation to result hypotheses. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean enableAutomaticPunctuation; /** * This needs to be set to `true` explicitly and `audio_channel_count` > 1 to get each channel * recognized separately. The recognition result will contain a `channel_tag` field to state which * channel that result belongs to. If this is not true, we will only recognize the first channel. * The request is billed cumulatively for all channels recognized: `audio_channel_count` * multiplied by the length of the audio. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean enableSeparateRecognitionPerChannel; /** * If 'true', enables speaker detection for each recognized word in the top alternative of the * recognition result using a speaker_tag provided in the WordInfo. Note: Use diarization_config * instead. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean enableSpeakerDiarization; /** * The spoken emoji behavior for the call If not set, uses default behavior based on model of * choice If 'true', adds spoken emoji formatting for the request. This will replace spoken emojis * with the corresponding Unicode symbols in the final transcript. If 'false', spoken emojis are * not replaced. 
* The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean enableSpokenEmojis; /** * The spoken punctuation behavior for the call If not set, uses default behavior based on model * of choice e.g. command_and_search will enable spoken punctuation by default If 'true', replaces * spoken punctuation with the corresponding symbols in the request. For example, "how are you * question mark" becomes "how are you?". See https://cloud.google.com/speech-to-text/docs/spoken- * punctuation for support. If 'false', spoken punctuation is not replaced. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean enableSpokenPunctuation; /** * If `true`, the top result includes a list of words and the confidence for those words. If * `false`, no word-level confidence information is returned. The default is `false`. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean enableWordConfidence; /** * If `true`, the top result includes a list of words and the start and end time offsets * (timestamps) for those words. If `false`, no word-level time offset information is returned. * The default is `false`. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean enableWordTimeOffsets; /** * Encoding of audio data sent in all `RecognitionAudio` messages. This field is optional for * `FLAC` and `WAV` audio files and required for all other audio formats. For details, see * AudioEncoding. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String encoding; /** * Required. The language of the supplied audio as a [BCP-47](https://www.rfc- * editor.org/rfc/bcp/bcp47.txt) language tag. Example: "en-US". See [Language * Support](https://cloud.google.com/speech-to-text/docs/languages) for a list of the currently * supported language codes. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.lang.String languageCode; /** * Maximum number of recognition hypotheses to be returned. Specifically, the maximum number of * `SpeechRecognitionAlternative` messages within each `SpeechRecognitionResult`. The server may * return fewer than `max_alternatives`. Valid values are `0`-`30`. A value of `0` or `1` will * return a maximum of one. If omitted, will return a maximum of one. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Integer maxAlternatives; /** * Metadata regarding this request. * The value may be {@code null}. */ @com.google.api.client.util.Key private RecognitionMetadata metadata; /** * Which model to select for the given request. Select the model best suited to your domain to get * best results. If a model is not explicitly specified, then we auto-select a model based on the * parameters in the RecognitionConfig. *Model* *Description* latest_long Best for long form * content like media or conversation. latest_short Best for short form content like commands or * single shot directed speech. command_and_search Best for short queries such as voice commands * or voice search. phone_call Best for audio that originated from a phone call (typically * recorded at an 8khz sampling rate). video Best for audio that originated from video or includes * multiple speakers. Ideally the audio is recorded at a 16khz or greater sampling rate. This is a * premium model that costs more than the standard rate. default Best for audio that is not one of * the specific audio models. For example, long-form audio. Ideally the audio is high-fidelity, * recorded at a 16khz or greater sampling rate. medical_conversation Best for audio that * originated from a conversation between a medical provider and patient. medical_dictation Best * for audio that originated from dictation notes by a medical provider. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.lang.String model; /** * If set to `true`, the server will attempt to filter out profanities, replacing all but the * initial character in each filtered word with asterisks, e.g. "f***". If set to `false` or * omitted, profanities won't be filtered out. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean profanityFilter; /** * Sample rate in Hertz of the audio data sent in all `RecognitionAudio` messages. Valid values * are: 8000-48000. 16000 is optimal. For best results, set the sampling rate of the audio source * to 16000 Hz. If that's not possible, use the native sample rate of the audio source (instead of * re-sampling). This field is optional for FLAC and WAV audio files, but is required for all * other audio formats. For details, see AudioEncoding. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Integer sampleRateHertz; /** * Array of SpeechContext. A means to provide context to assist the speech recognition. For more * information, see [speech adaptation](https://cloud.google.com/speech-to-text/docs/adaptation). * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<SpeechContext> speechContexts; /** * Use transcription normalization to automatically replace parts of the transcript with phrases * of your choosing. For StreamingRecognize, this normalization only applies to stable partial * transcripts (stability > 0.8) and final transcripts. * The value may be {@code null}. */ @com.google.api.client.util.Key private TranscriptNormalization transcriptNormalization; /** * Set to true to use an enhanced model for speech recognition. If `use_enhanced` is set to true * and the `model` field is not set, then an appropriate enhanced model is chosen if an enhanced * model exists for the audio. 
If `use_enhanced` is true and an enhanced version of the specified * model does not exist, then the speech is recognized using the standard version of the specified * model. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean useEnhanced; /** * Speech adaptation configuration improves the accuracy of speech recognition. For more * information, see the [speech adaptation](https://cloud.google.com/speech-to- * text/docs/adaptation) documentation. When speech adaptation is set it supersedes the * `speech_contexts` field. * @return value or {@code null} for none */ public SpeechAdaptation getAdaptation() { return adaptation; } /** * Speech adaptation configuration improves the accuracy of speech recognition. For more * information, see the [speech adaptation](https://cloud.google.com/speech-to- * text/docs/adaptation) documentation. When speech adaptation is set it supersedes the * `speech_contexts` field. * @param adaptation adaptation or {@code null} for none */ public RecognitionConfig setAdaptation(SpeechAdaptation adaptation) { this.adaptation = adaptation; return this; } /** * A list of up to 3 additional [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language * tags, listing possible alternative languages of the supplied audio. See [Language * Support](https://cloud.google.com/speech-to-text/docs/languages) for a list of the currently * supported language codes. If alternative languages are listed, recognition result will contain * recognition in the most likely language detected including the main language_code. The * recognition result will include the language tag of the language detected in the audio. Note: * This feature is only supported for Voice Command and Voice Search use cases and performance may * vary for other use cases (e.g., phone call transcription). 
* @return value or {@code null} for none */ public java.util.List<java.lang.String> getAlternativeLanguageCodes() { return alternativeLanguageCodes; } /** * A list of up to 3 additional [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language * tags, listing possible alternative languages of the supplied audio. See [Language * Support](https://cloud.google.com/speech-to-text/docs/languages) for a list of the currently * supported language codes. If alternative languages are listed, recognition result will contain * recognition in the most likely language detected including the main language_code. The * recognition result will include the language tag of the language detected in the audio. Note: * This feature is only supported for Voice Command and Voice Search use cases and performance may * vary for other use cases (e.g., phone call transcription). * @param alternativeLanguageCodes alternativeLanguageCodes or {@code null} for none */ public RecognitionConfig setAlternativeLanguageCodes(java.util.List<java.lang.String> alternativeLanguageCodes) { this.alternativeLanguageCodes = alternativeLanguageCodes; return this; } /** * The number of channels in the input audio data. ONLY set this for MULTI-CHANNEL recognition. * Valid values for LINEAR16 and FLAC are `1`-`8`. Valid values for OGG_OPUS are '1'-'254'. Valid * value for MULAW, AMR, AMR_WB and SPEEX_WITH_HEADER_BYTE is only `1`. If `0` or omitted, * defaults to one channel (mono). Note: We only recognize the first channel by default. To * perform independent recognition on each channel set `enable_separate_recognition_per_channel` * to 'true'. * @return value or {@code null} for none */ public java.lang.Integer getAudioChannelCount() { return audioChannelCount; } /** * The number of channels in the input audio data. ONLY set this for MULTI-CHANNEL recognition. * Valid values for LINEAR16 and FLAC are `1`-`8`. Valid values for OGG_OPUS are '1'-'254'. 
Valid * value for MULAW, AMR, AMR_WB and SPEEX_WITH_HEADER_BYTE is only `1`. If `0` or omitted, * defaults to one channel (mono). Note: We only recognize the first channel by default. To * perform independent recognition on each channel set `enable_separate_recognition_per_channel` * to 'true'. * @param audioChannelCount audioChannelCount or {@code null} for none */ public RecognitionConfig setAudioChannelCount(java.lang.Integer audioChannelCount) { this.audioChannelCount = audioChannelCount; return this; } /** * Config to enable speaker diarization and set additional parameters to make diarization better * suited for your application. Note: When this is enabled, we send all the words from the * beginning of the audio for the top alternative in every consecutive STREAMING responses. This * is done in order to improve our speaker tags as our models learn to identify the speakers in * the conversation over time. For non-streaming requests, the diarization results will be * provided only in the top alternative of the FINAL SpeechRecognitionResult. * @return value or {@code null} for none */ public SpeakerDiarizationConfig getDiarizationConfig() { return diarizationConfig; } /** * Config to enable speaker diarization and set additional parameters to make diarization better * suited for your application. Note: When this is enabled, we send all the words from the * beginning of the audio for the top alternative in every consecutive STREAMING responses. This * is done in order to improve our speaker tags as our models learn to identify the speakers in * the conversation over time. For non-streaming requests, the diarization results will be * provided only in the top alternative of the FINAL SpeechRecognitionResult. 
* @param diarizationConfig diarizationConfig or {@code null} for none */ public RecognitionConfig setDiarizationConfig(SpeakerDiarizationConfig diarizationConfig) { this.diarizationConfig = diarizationConfig; return this; } /** * If set, specifies the estimated number of speakers in the conversation. Defaults to '2'. * Ignored unless enable_speaker_diarization is set to true. Note: Use diarization_config instead. * @return value or {@code null} for none */ public java.lang.Integer getDiarizationSpeakerCount() { return diarizationSpeakerCount; } /** * If set, specifies the estimated number of speakers in the conversation. Defaults to '2'. * Ignored unless enable_speaker_diarization is set to true. Note: Use diarization_config instead. * @param diarizationSpeakerCount diarizationSpeakerCount or {@code null} for none */ public RecognitionConfig setDiarizationSpeakerCount(java.lang.Integer diarizationSpeakerCount) { this.diarizationSpeakerCount = diarizationSpeakerCount; return this; } /** * If 'true', adds punctuation to recognition result hypotheses. This feature is only available in * select languages. Setting this for requests in other languages has no effect at all. The * default 'false' value does not add punctuation to result hypotheses. * @return value or {@code null} for none */ public java.lang.Boolean getEnableAutomaticPunctuation() { return enableAutomaticPunctuation; } /** * If 'true', adds punctuation to recognition result hypotheses. This feature is only available in * select languages. Setting this for requests in other languages has no effect at all. The * default 'false' value does not add punctuation to result hypotheses. 
* @param enableAutomaticPunctuation enableAutomaticPunctuation or {@code null} for none */ public RecognitionConfig setEnableAutomaticPunctuation(java.lang.Boolean enableAutomaticPunctuation) { this.enableAutomaticPunctuation = enableAutomaticPunctuation; return this; } /** * This needs to be set to `true` explicitly and `audio_channel_count` > 1 to get each channel * recognized separately. The recognition result will contain a `channel_tag` field to state which * channel that result belongs to. If this is not true, we will only recognize the first channel. * The request is billed cumulatively for all channels recognized: `audio_channel_count` * multiplied by the length of the audio. * @return value or {@code null} for none */ public java.lang.Boolean getEnableSeparateRecognitionPerChannel() { return enableSeparateRecognitionPerChannel; } /** * This needs to be set to `true` explicitly and `audio_channel_count` > 1 to get each channel * recognized separately. The recognition result will contain a `channel_tag` field to state which * channel that result belongs to. If this is not true, we will only recognize the first channel. * The request is billed cumulatively for all channels recognized: `audio_channel_count` * multiplied by the length of the audio. * @param enableSeparateRecognitionPerChannel enableSeparateRecognitionPerChannel or {@code null} for none */ public RecognitionConfig setEnableSeparateRecognitionPerChannel(java.lang.Boolean enableSeparateRecognitionPerChannel) { this.enableSeparateRecognitionPerChannel = enableSeparateRecognitionPerChannel; return this; } /** * If 'true', enables speaker detection for each recognized word in the top alternative of the * recognition result using a speaker_tag provided in the WordInfo. Note: Use diarization_config * instead. 
* @return value or {@code null} for none */ public java.lang.Boolean getEnableSpeakerDiarization() { return enableSpeakerDiarization; } /** * If 'true', enables speaker detection for each recognized word in the top alternative of the * recognition result using a speaker_tag provided in the WordInfo. Note: Use diarization_config * instead. * @param enableSpeakerDiarization enableSpeakerDiarization or {@code null} for none */ public RecognitionConfig setEnableSpeakerDiarization(java.lang.Boolean enableSpeakerDiarization) { this.enableSpeakerDiarization = enableSpeakerDiarization; return this; } /** * The spoken emoji behavior for the call If not set, uses default behavior based on model of * choice If 'true', adds spoken emoji formatting for the request. This will replace spoken emojis * with the corresponding Unicode symbols in the final transcript. If 'false', spoken emojis are * not replaced. * @return value or {@code null} for none */ public java.lang.Boolean getEnableSpokenEmojis() { return enableSpokenEmojis; } /** * The spoken emoji behavior for the call If not set, uses default behavior based on model of * choice If 'true', adds spoken emoji formatting for the request. This will replace spoken emojis * with the corresponding Unicode symbols in the final transcript. If 'false', spoken emojis are * not replaced. * @param enableSpokenEmojis enableSpokenEmojis or {@code null} for none */ public RecognitionConfig setEnableSpokenEmojis(java.lang.Boolean enableSpokenEmojis) { this.enableSpokenEmojis = enableSpokenEmojis; return this; } /** * The spoken punctuation behavior for the call If not set, uses default behavior based on model * of choice e.g. command_and_search will enable spoken punctuation by default If 'true', replaces * spoken punctuation with the corresponding symbols in the request. For example, "how are you * question mark" becomes "how are you?". See https://cloud.google.com/speech-to-text/docs/spoken- * punctuation for support. 
If 'false', spoken punctuation is not replaced. * @return value or {@code null} for none */ public java.lang.Boolean getEnableSpokenPunctuation() { return enableSpokenPunctuation; } /** * The spoken punctuation behavior for the call If not set, uses default behavior based on model * of choice e.g. command_and_search will enable spoken punctuation by default If 'true', replaces * spoken punctuation with the corresponding symbols in the request. For example, "how are you * question mark" becomes "how are you?". See https://cloud.google.com/speech-to-text/docs/spoken- * punctuation for support. If 'false', spoken punctuation is not replaced. * @param enableSpokenPunctuation enableSpokenPunctuation or {@code null} for none */ public RecognitionConfig setEnableSpokenPunctuation(java.lang.Boolean enableSpokenPunctuation) { this.enableSpokenPunctuation = enableSpokenPunctuation; return this; } /** * If `true`, the top result includes a list of words and the confidence for those words. If * `false`, no word-level confidence information is returned. The default is `false`. * @return value or {@code null} for none */ public java.lang.Boolean getEnableWordConfidence() { return enableWordConfidence; } /** * If `true`, the top result includes a list of words and the confidence for those words. If * `false`, no word-level confidence information is returned. The default is `false`. * @param enableWordConfidence enableWordConfidence or {@code null} for none */ public RecognitionConfig setEnableWordConfidence(java.lang.Boolean enableWordConfidence) { this.enableWordConfidence = enableWordConfidence; return this; } /** * If `true`, the top result includes a list of words and the start and end time offsets * (timestamps) for those words. If `false`, no word-level time offset information is returned. * The default is `false`. 
* @return value or {@code null} for none */ public java.lang.Boolean getEnableWordTimeOffsets() { return enableWordTimeOffsets; } /** * If `true`, the top result includes a list of words and the start and end time offsets * (timestamps) for those words. If `false`, no word-level time offset information is returned. * The default is `false`. * @param enableWordTimeOffsets enableWordTimeOffsets or {@code null} for none */ public RecognitionConfig setEnableWordTimeOffsets(java.lang.Boolean enableWordTimeOffsets) { this.enableWordTimeOffsets = enableWordTimeOffsets; return this; } /** * Encoding of audio data sent in all `RecognitionAudio` messages. This field is optional for * `FLAC` and `WAV` audio files and required for all other audio formats. For details, see * AudioEncoding. * @return value or {@code null} for none */ public java.lang.String getEncoding() { return encoding; } /** * Encoding of audio data sent in all `RecognitionAudio` messages. This field is optional for * `FLAC` and `WAV` audio files and required for all other audio formats. For details, see * AudioEncoding. * @param encoding encoding or {@code null} for none */ public RecognitionConfig setEncoding(java.lang.String encoding) { this.encoding = encoding; return this; } /** * Required. The language of the supplied audio as a [BCP-47](https://www.rfc- * editor.org/rfc/bcp/bcp47.txt) language tag. Example: "en-US". See [Language * Support](https://cloud.google.com/speech-to-text/docs/languages) for a list of the currently * supported language codes. * @return value or {@code null} for none */ public java.lang.String getLanguageCode() { return languageCode; } /** * Required. The language of the supplied audio as a [BCP-47](https://www.rfc- * editor.org/rfc/bcp/bcp47.txt) language tag. Example: "en-US". See [Language * Support](https://cloud.google.com/speech-to-text/docs/languages) for a list of the currently * supported language codes. 
* @param languageCode languageCode or {@code null} for none */ public RecognitionConfig setLanguageCode(java.lang.String languageCode) { this.languageCode = languageCode; return this; } /** * Maximum number of recognition hypotheses to be returned. Specifically, the maximum number of * `SpeechRecognitionAlternative` messages within each `SpeechRecognitionResult`. The server may * return fewer than `max_alternatives`. Valid values are `0`-`30`. A value of `0` or `1` will * return a maximum of one. If omitted, will return a maximum of one. * @return value or {@code null} for none */ public java.lang.Integer getMaxAlternatives() { return maxAlternatives; } /** * Maximum number of recognition hypotheses to be returned. Specifically, the maximum number of * `SpeechRecognitionAlternative` messages within each `SpeechRecognitionResult`. The server may * return fewer than `max_alternatives`. Valid values are `0`-`30`. A value of `0` or `1` will * return a maximum of one. If omitted, will return a maximum of one. * @param maxAlternatives maxAlternatives or {@code null} for none */ public RecognitionConfig setMaxAlternatives(java.lang.Integer maxAlternatives) { this.maxAlternatives = maxAlternatives; return this; } /** * Metadata regarding this request. * @return value or {@code null} for none */ public RecognitionMetadata getMetadata() { return metadata; } /** * Metadata regarding this request. * @param metadata metadata or {@code null} for none */ public RecognitionConfig setMetadata(RecognitionMetadata metadata) { this.metadata = metadata; return this; } /** * Which model to select for the given request. Select the model best suited to your domain to get * best results. If a model is not explicitly specified, then we auto-select a model based on the * parameters in the RecognitionConfig. *Model* *Description* latest_long Best for long form * content like media or conversation. latest_short Best for short form content like commands or * single shot directed speech. 
command_and_search Best for short queries such as voice commands * or voice search. phone_call Best for audio that originated from a phone call (typically * recorded at an 8khz sampling rate). video Best for audio that originated from video or includes * multiple speakers. Ideally the audio is recorded at a 16khz or greater sampling rate. This is a * premium model that costs more than the standard rate. default Best for audio that is not one of * the specific audio models. For example, long-form audio. Ideally the audio is high-fidelity, * recorded at a 16khz or greater sampling rate. medical_conversation Best for audio that * originated from a conversation between a medical provider and patient. medical_dictation Best * for audio that originated from dictation notes by a medical provider. * @return value or {@code null} for none */ public java.lang.String getModel() { return model; } /** * Which model to select for the given request. Select the model best suited to your domain to get * best results. If a model is not explicitly specified, then we auto-select a model based on the * parameters in the RecognitionConfig. *Model* *Description* latest_long Best for long form * content like media or conversation. latest_short Best for short form content like commands or * single shot directed speech. command_and_search Best for short queries such as voice commands * or voice search. phone_call Best for audio that originated from a phone call (typically * recorded at an 8khz sampling rate). video Best for audio that originated from video or includes * multiple speakers. Ideally the audio is recorded at a 16khz or greater sampling rate. This is a * premium model that costs more than the standard rate. default Best for audio that is not one of * the specific audio models. For example, long-form audio. Ideally the audio is high-fidelity, * recorded at a 16khz or greater sampling rate. 
medical_conversation Best for audio that * originated from a conversation between a medical provider and patient. medical_dictation Best * for audio that originated from dictation notes by a medical provider. * @param model model or {@code null} for none */ public RecognitionConfig setModel(java.lang.String model) { this.model = model; return this; } /** * If set to `true`, the server will attempt to filter out profanities, replacing all but the * initial character in each filtered word with asterisks, e.g. "f***". If set to `false` or * omitted, profanities won't be filtered out. * @return value or {@code null} for none */ public java.lang.Boolean getProfanityFilter() { return profanityFilter; } /** * If set to `true`, the server will attempt to filter out profanities, replacing all but the * initial character in each filtered word with asterisks, e.g. "f***". If set to `false` or * omitted, profanities won't be filtered out. * @param profanityFilter profanityFilter or {@code null} for none */ public RecognitionConfig setProfanityFilter(java.lang.Boolean profanityFilter) { this.profanityFilter = profanityFilter; return this; } /** * Sample rate in Hertz of the audio data sent in all `RecognitionAudio` messages. Valid values * are: 8000-48000. 16000 is optimal. For best results, set the sampling rate of the audio source * to 16000 Hz. If that's not possible, use the native sample rate of the audio source (instead of * re-sampling). This field is optional for FLAC and WAV audio files, but is required for all * other audio formats. For details, see AudioEncoding. * @return value or {@code null} for none */ public java.lang.Integer getSampleRateHertz() { return sampleRateHertz; } /** * Sample rate in Hertz of the audio data sent in all `RecognitionAudio` messages. Valid values * are: 8000-48000. 16000 is optimal. For best results, set the sampling rate of the audio source * to 16000 Hz. 
If that's not possible, use the native sample rate of the audio source (instead of * re-sampling). This field is optional for FLAC and WAV audio files, but is required for all * other audio formats. For details, see AudioEncoding. * @param sampleRateHertz sampleRateHertz or {@code null} for none */ public RecognitionConfig setSampleRateHertz(java.lang.Integer sampleRateHertz) { this.sampleRateHertz = sampleRateHertz; return this; } /** * Array of SpeechContext. A means to provide context to assist the speech recognition. For more * information, see [speech adaptation](https://cloud.google.com/speech-to-text/docs/adaptation). * @return value or {@code null} for none */ public java.util.List<SpeechContext> getSpeechContexts() { return speechContexts; } /** * Array of SpeechContext. A means to provide context to assist the speech recognition. For more * information, see [speech adaptation](https://cloud.google.com/speech-to-text/docs/adaptation). * @param speechContexts speechContexts or {@code null} for none */ public RecognitionConfig setSpeechContexts(java.util.List<SpeechContext> speechContexts) { this.speechContexts = speechContexts; return this; } /** * Use transcription normalization to automatically replace parts of the transcript with phrases * of your choosing. For StreamingRecognize, this normalization only applies to stable partial * transcripts (stability > 0.8) and final transcripts. * @return value or {@code null} for none */ public TranscriptNormalization getTranscriptNormalization() { return transcriptNormalization; } /** * Use transcription normalization to automatically replace parts of the transcript with phrases * of your choosing. For StreamingRecognize, this normalization only applies to stable partial * transcripts (stability > 0.8) and final transcripts. 
* @param transcriptNormalization transcriptNormalization or {@code null} for none */ public RecognitionConfig setTranscriptNormalization(TranscriptNormalization transcriptNormalization) { this.transcriptNormalization = transcriptNormalization; return this; } /** * Set to true to use an enhanced model for speech recognition. If `use_enhanced` is set to true * and the `model` field is not set, then an appropriate enhanced model is chosen if an enhanced * model exists for the audio. If `use_enhanced` is true and an enhanced version of the specified * model does not exist, then the speech is recognized using the standard version of the specified * model. * @return value or {@code null} for none */ public java.lang.Boolean getUseEnhanced() { return useEnhanced; } /** * Set to true to use an enhanced model for speech recognition. If `use_enhanced` is set to true * and the `model` field is not set, then an appropriate enhanced model is chosen if an enhanced * model exists for the audio. If `use_enhanced` is true and an enhanced version of the specified * model does not exist, then the speech is recognized using the standard version of the specified * model. * @param useEnhanced useEnhanced or {@code null} for none */ public RecognitionConfig setUseEnhanced(java.lang.Boolean useEnhanced) { this.useEnhanced = useEnhanced; return this; } @Override public RecognitionConfig set(String fieldName, Object value) { return (RecognitionConfig) super.set(fieldName, value); } @Override public RecognitionConfig clone() { return (RecognitionConfig) super.clone(); } }
apache/hive
36,980
common/src/java/org/apache/hive/common/util/HiveStringUtils.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hive.common.util; import com.google.common.base.Splitter; import java.io.PrintWriter; import java.io.StringWriter; import java.net.InetAddress; import java.net.URI; import java.net.URISyntaxException; import java.net.UnknownHostException; import java.text.DateFormat; import java.text.DecimalFormat; import java.text.NumberFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.StringTokenizer; import java.util.regex.Pattern; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.text.translate.CharSequenceTranslator; import org.apache.commons.lang3.text.translate.EntityArrays; import org.apache.commons.lang3.text.translate.JavaUnicodeEscaper; import org.apache.commons.lang3.text.translate.LookupTranslator; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.classification.InterfaceAudience; import org.apache.hadoop.hive.common.classification.InterfaceStability; import org.apache.hadoop.io.Text; /** * HiveStringUtils * General string utils * * 
Originally copied from o.a.hadoop.util.StringUtils */ @InterfaceAudience.Private @InterfaceStability.Unstable public class HiveStringUtils { /** * Priority of the StringUtils shutdown hook. */ public static final int SHUTDOWN_HOOK_PRIORITY = 0; private static final DecimalFormat decimalFormat; private static final CharSequenceTranslator ESCAPE_JAVA = new LookupTranslator( new String[][] { {"\"", "\\\""}, {"\\", "\\\\"}, }).with( new LookupTranslator(EntityArrays.JAVA_CTRL_CHARS_ESCAPE())); private static final CharSequenceTranslator ESCAPE_HIVE_COMMAND = new LookupTranslator( new String[][] { {"'", "\\'"}, {";", "\\;"}, {"\\", "\\\\"}, }).with( new LookupTranslator(EntityArrays.JAVA_CTRL_CHARS_ESCAPE())); private static final CharSequenceTranslator UNICODE_CONVERTER = JavaUnicodeEscaper.outsideOf(32, 127); static { NumberFormat numberFormat = NumberFormat.getNumberInstance(Locale.ENGLISH); decimalFormat = (DecimalFormat) numberFormat; decimalFormat.applyPattern("#.##"); } /** * Return the internalized string, or null if the given string is null. * @param str The string to intern * @return The identical string cached in the string pool. */ public static String intern(String str) { if(str == null) { return null; } return str.intern(); } /** * Return an interned list with identical contents as the given list. * @param list The list whose strings will be interned * @return An identical list with its strings interned. */ public static List<String> intern(List<String> list) { if(list == null) { return null; } List<String> newList = new ArrayList<String>(list.size()); for(String str : list) { newList.add(intern(str)); } return newList; } /** * Return an interned map with identical contents as the given map. * @param map The map whose strings will be interned * @return An identical map with its strings interned. 
*/ public static Map<String, String> intern(Map<String, String> map) { if(map == null) { return null; } if (map.isEmpty()) { // nothing to intern return map; } Map<String, String> newMap = new HashMap<String, String>(map.size()); for(Map.Entry<String, String> entry : map.entrySet()) { newMap.put(intern(entry.getKey()), intern(entry.getValue())); } return newMap; } /** * Make a string representation of the exception. * @param e The exception to stringify * @return A string with exception name and call stack. */ public static String stringifyException(Throwable e) { StringWriter stm = new StringWriter(); PrintWriter wrt = new PrintWriter(stm); e.printStackTrace(wrt); wrt.close(); return stm.toString(); } /** * Given a full hostname, return the word upto the first dot. * @param fullHostname the full hostname * @return the hostname to the first dot */ public static String simpleHostname(String fullHostname) { int offset = fullHostname.indexOf('.'); if (offset != -1) { return fullHostname.substring(0, offset); } return fullHostname; } /** * Format a percentage for presentation to the user. * @param done the percentage to format (0.0 to 1.0) * @param digits the number of digits past the decimal point * @return a string representation of the percentage */ public static String formatPercent(double done, int digits) { DecimalFormat percentFormat = new DecimalFormat("0.00%"); double scale = Math.pow(10.0, digits+2); double rounded = Math.floor(done * scale); percentFormat.setDecimalSeparatorAlwaysShown(false); percentFormat.setMinimumFractionDigits(digits); percentFormat.setMaximumFractionDigits(digits); return percentFormat.format(rounded / scale); } /** * Given an array of strings, return a comma-separated list of its elements. 
* @param strs Array of strings * @return Empty string if strs.length is 0, comma separated list of strings * otherwise */ public static String arrayToString(String[] strs) { if (strs.length == 0) { return ""; } StringBuilder sbuf = new StringBuilder(); sbuf.append(strs[0]); for (int idx = 1; idx < strs.length; idx++) { sbuf.append(","); sbuf.append(strs[idx]); } return sbuf.toString(); } /** * Given an array of bytes it will convert the bytes to a hex string * representation of the bytes * @param bytes * @param start start index, inclusively * @param end end index, exclusively * @return hex string representation of the byte array */ public static String byteToHexString(byte[] bytes, int start, int end) { if (bytes == null) { throw new IllegalArgumentException("bytes == null"); } StringBuilder s = new StringBuilder(); for(int i = start; i < end; i++) { s.append("%02x".formatted(bytes[i])); } return s.toString(); } /** Same as byteToHexString(bytes, 0, bytes.length). */ public static String byteToHexString(byte bytes[]) { return byteToHexString(bytes, 0, bytes.length); } /** * Given a hexstring this will return the byte array corresponding to the * string * @param hex the hex String array * @return a byte array that is a hex string representation of the given * string. The size of the byte array is therefore hex.length/2 */ public static byte[] hexStringToByte(String hex) { byte[] bts = new byte[hex.length() / 2]; for (int i = 0; i < bts.length; i++) { bts[i] = (byte) Integer.parseInt(hex.substring(2 * i, 2 * i + 2), 16); } return bts; } /** * * @param uris */ public static String uriToString(URI[] uris){ if (uris == null) { return null; } StringBuilder ret = new StringBuilder(uris[0].toString()); for(int i = 1; i < uris.length;i++){ ret.append(","); ret.append(uris[i].toString()); } return ret.toString(); } /** * @param str * The string array to be parsed into an URI array. * @return <pre>null</pre> if str is <pre>null</pre>, else the URI array * equivalent to str. 
* @throws IllegalArgumentException * If any string in str violates RFC&nbsp;2396. */ public static URI[] stringToURI(String[] str){ if (str == null) { return null; } URI[] uris = new URI[str.length]; for (int i = 0; i < str.length;i++){ try{ uris[i] = new URI(str[i]); }catch(URISyntaxException ur){ throw new IllegalArgumentException( "Failed to create uri for " + str[i], ur); } } return uris; } /** * * @param str */ public static Path[] stringToPath(String[] str){ if (str == null) { return null; } Path[] p = new Path[str.length]; for (int i = 0; i < str.length;i++){ p[i] = new Path(str[i]); } return p; } /** * * Given a finish and start time in long milliseconds, returns a * String in the format Xhrs, Ymins, Z sec, for the time difference between two times. * If finish time comes before start time then negative values of X, Y and Z will return. * * @param finishTime finish time * @param startTime start time */ public static String formatTimeDiff(long finishTime, long startTime){ long timeDiff = finishTime - startTime; return formatTime(timeDiff); } /** * * Given the time in long milliseconds, returns a * String in the format Xhrs, Ymins, Z sec. * * @param timeDiff The time difference to format */ public static String formatTime(long timeDiff){ StringBuilder buf = new StringBuilder(); long hours = timeDiff / (60*60*1000); long rem = (timeDiff % (60*60*1000)); long minutes = rem / (60*1000); rem = rem % (60*1000); long seconds = rem / 1000; if (hours != 0){ buf.append(hours); buf.append("hrs, "); } if (minutes != 0){ buf.append(minutes); buf.append("mins, "); } // return "0sec if no difference buf.append(seconds); buf.append("sec"); return buf.toString(); } /** * Formats time in ms and appends difference (finishTime - startTime) * as returned by formatTimeDiff(). * If finish time is 0, empty string is returned, if start time is 0 * then difference is not appended to return value. 
* @param dateFormat date format to use * @param finishTime finish time * @param startTime start time * @return formatted value. */ public static String getFormattedTimeWithDiff(DateFormat dateFormat, long finishTime, long startTime){ StringBuilder buf = new StringBuilder(); if (0 != finishTime) { buf.append(dateFormat.format(new Date(finishTime))); if (0 != startTime){ buf.append(" (" + formatTimeDiff(finishTime , startTime) + ")"); } } return buf.toString(); } /** * Returns an arraylist of strings. * @param str the comma separated string values * @return the arraylist of the comma separated string values */ public static String[] getStrings(String str){ Collection<String> values = getStringCollection(str); if(values.size() == 0) { return null; } return values.toArray(new String[values.size()]); } /** * Returns a collection of strings. * @param str comma separated string values * @return an <code>ArrayList</code> of string values */ public static Collection<String> getStringCollection(String str){ List<String> values = new ArrayList<String>(); if (str == null) { return values; } StringTokenizer tokenizer = new StringTokenizer (str,","); values = new ArrayList<String>(); while (tokenizer.hasMoreTokens()) { values.add(tokenizer.nextToken()); } return values; } /** * Splits a comma separated value <code>String</code>, trimming leading and trailing whitespace on each value. * @param str a comma separated <code>String</code> with values * @return a <code>Collection</code> of <code>String</code> values */ public static Collection<String> getTrimmedStringCollection(String str){ return new ArrayList<String>( Arrays.asList(getTrimmedStrings(str))); } /** * Splits a comma separated value <code>String</code>, trimming leading and trailing whitespace on each value. 
* @param str a comma separated <code>String</code> with values * @return an array of <code>String</code> values */ public static String[] getTrimmedStrings(String str){ if (null == str || "".equals(str.trim())) { return emptyStringArray; } return str.trim().split("\\s*,\\s*"); } final public static String[] emptyStringArray = {}; final public static char COMMA = ','; final public static char EQUALS = '='; final public static String COMMA_STR = ","; final public static char ESCAPE_CHAR = '\\'; /** * Split a string using the default separator * @param str a string that may have escaped separator * @return an array of strings */ public static String[] split(String str) { return split(str, ESCAPE_CHAR, COMMA); } /** * Split a string using the given separator * @param str a string that may have escaped separator * @param escapeChar a char that be used to escape the separator * @param separator a separator char * @return an array of strings */ public static String[] split( String str, char escapeChar, char separator) { if (str==null) { return null; } ArrayList<String> strList = new ArrayList<String>(); StringBuilder split = new StringBuilder(); int index = 0; while ((index = findNext(str, separator, escapeChar, index, split)) >= 0) { ++index; // move over the separator for next search strList.add(split.toString()); split.setLength(0); // reset the buffer } strList.add(split.toString()); // remove trailing empty split(s) int last = strList.size(); // last split while (--last>=0 && "".equals(strList.get(last))) { strList.remove(last); } return strList.toArray(new String[strList.size()]); } /** * Split a string using the given separator, with no escaping performed. * @param str a string to be split. Note that this may not be null. * @param separator a separator char * @return an array of strings */ public static String[] split( String str, char separator) { // String.split returns a single empty result for splitting the empty // string. 
if ("".equals(str)) { return new String[]{""}; } ArrayList<String> strList = new ArrayList<String>(); int startIndex = 0; int nextIndex = 0; while ((nextIndex = str.indexOf((int)separator, startIndex)) != -1) { strList.add(str.substring(startIndex, nextIndex)); startIndex = nextIndex + 1; } strList.add(str.substring(startIndex)); // remove trailing empty split(s) int last = strList.size(); // last split while (--last>=0 && "".equals(strList.get(last))) { strList.remove(last); } return strList.toArray(new String[strList.size()]); } /** * Split a string using the default separator/escape character, * then unescape the resulting array of strings * @param str * @return an array of unescaped strings */ public static String[] splitAndUnEscape(String str) { return splitAndUnEscape(str, ESCAPE_CHAR, COMMA); } /** * Split a string using the specified separator/escape character, * then unescape the resulting array of strings using the same escape/separator. * @param str a string that may have escaped separator * @param escapeChar a char that be used to escape the separator * @param separator a separator char * @return an array of unescaped strings */ public static String[] splitAndUnEscape(String str, char escapeChar, char separator) { String[] result = split(str, escapeChar, separator); if (result != null) { for (int idx = 0; idx < result.length; ++idx) { result[idx] = unEscapeString(result[idx], escapeChar, separator); } } return result; } /** * In a given string of comma-separated key=value pairs associates the specified value with * the specified key. * If the `string` previously contained a mapping for the key, the old value is replaced. 
* * @param key key with which the specified value is to be associated * @param value value to be associated with the specified key * @param strKvPairs Comma separated key=value pairs Eg: "k1=v1, k2=v2, k3=v3" * @return Updated comma separated string of key=value pairs */ public static String insertValue(String key, String value, String strKvPairs) { boolean keyNotFound = true; String[] keyValuePairs = HiveStringUtils.split(strKvPairs); StringBuilder sb = new StringBuilder(); for (int i = 0; i < keyValuePairs.length; i++) { String[] pair = HiveStringUtils.split(keyValuePairs[i], ESCAPE_CHAR, EQUALS); if (pair.length != 2) { throw new RuntimeException("Error parsing the keyvalue pair " + keyValuePairs[i]); } sb.append(pair[0]).append(EQUALS); if (pair[0].equals(key)) { sb.append(value); keyNotFound = false; } else { sb.append(pair[1]); } if (i < (keyValuePairs.length - 1) || keyNotFound) { sb.append(COMMA); } } if (keyNotFound) { sb.append(key).append(EQUALS).append(value); } return sb.toString(); } /** * Finds the first occurrence of the separator character ignoring the escaped * separators starting from the index. Note the substring between the index * and the position of the separator is passed. * @param str the source string * @param separator the character to find * @param escapeChar character used to escape * @param start from where to search * @param split used to pass back the extracted string */ public static int findNext(String str, char separator, char escapeChar, int start, StringBuilder split) { int numPreEscapes = 0; for (int i = start; i < str.length(); i++) { char curChar = str.charAt(i); if (numPreEscapes == 0 && curChar == separator) { // separator return i; } else { split.append(curChar); numPreEscapes = (curChar == escapeChar) ? 
(++numPreEscapes) % 2 : 0; } } return -1; } /** * Escape commas in the string using the default escape char * @param str a string * @return an escaped string */ public static String escapeString(String str) { return escapeString(str, ESCAPE_CHAR, COMMA); } /** * Escape <code>charToEscape</code> in the string * with the escape char <code>escapeChar</code> * * @param str string * @param escapeChar escape char * @param charToEscape the char to be escaped * @return an escaped string */ public static String escapeString( String str, char escapeChar, char charToEscape) { return escapeString(str, escapeChar, new char[] {charToEscape}); } // check if the character array has the character private static boolean hasChar(char[] chars, char character) { for (char target : chars) { if (character == target) { return true; } } return false; } /** * @param charsToEscape array of characters to be escaped */ public static String escapeString(String str, char escapeChar, char[] charsToEscape) { if (str == null) { return null; } StringBuilder result = new StringBuilder(); for (int i=0; i<str.length(); i++) { char curChar = str.charAt(i); if (curChar == escapeChar || hasChar(charsToEscape, curChar)) { // special char result.append(escapeChar); } result.append(curChar); } return result.toString(); } /** * Escape non-unicode characters. StringEscapeUtil.escapeJava() will escape * unicode characters as well but in some cases it's not desired. * * @param str Original string * @return Escaped string */ public static String escapeJava(String str) { return ESCAPE_JAVA.translate(str); } /** * Escape non-unicode characters, and ', and ; * Like StringEscapeUtil.escapeJava() will escape * unicode characters as well but in some cases it's not desired. * * @param str Original string * @return Escaped string */ public static String escapeHiveCommand(String str) { return ESCAPE_HIVE_COMMAND.translate(str); } /** * Escape java unicode characters. 
* * @param str Original string * @return Escaped string */ public static String escapeUnicode(String str) { return UNICODE_CONVERTER.translate(str); } /** * Unescape commas in the string using the default escape char * @param str a string * @return an unescaped string */ public static String unEscapeString(String str) { return unEscapeString(str, ESCAPE_CHAR, COMMA); } /** * Unescape <code>charToEscape</code> in the string * with the escape char <code>escapeChar</code> * * @param str string * @param escapeChar escape char * @param charToEscape the escaped char * @return an unescaped string */ public static String unEscapeString( String str, char escapeChar, char charToEscape) { return unEscapeString(str, escapeChar, new char[] {charToEscape}); } /** * @param charsToEscape array of characters to unescape */ public static String unEscapeString(String str, char escapeChar, char[] charsToEscape) { if (str == null) { return null; } StringBuilder result = new StringBuilder(str.length()); boolean hasPreEscape = false; for (int i=0; i<str.length(); i++) { char curChar = str.charAt(i); if (hasPreEscape) { if (curChar != escapeChar && !hasChar(charsToEscape, curChar)) { // no special char throw new IllegalArgumentException("Illegal escaped string " + str + " unescaped " + escapeChar + " at " + (i-1)); } // otherwise discard the escape char result.append(curChar); hasPreEscape = false; } else { if (hasChar(charsToEscape, curChar)) { throw new IllegalArgumentException("Illegal escaped string " + str + " unescaped " + curChar + " at " + i); } else if (curChar == escapeChar) { hasPreEscape = true; } else { result.append(curChar); } } } if (hasPreEscape ) { throw new IllegalArgumentException("Illegal escaped string " + str + ", not expecting " + escapeChar + " in the end." ); } return result.toString(); } /** * Return a message for logging. 
* @param prefix prefix keyword for the message * @param msg content of the message * @return a message for logging */ private static String toStartupShutdownString(String prefix, String [] msg) { StringBuilder b = new StringBuilder(prefix); b.append("\n/************************************************************"); for(String s : msg) { b.append("\n" + prefix + s); } b.append("\n************************************************************/"); return b.toString(); } /** * Print a log message for starting up and shutting down * @param clazz the class of the server * @param args arguments * @param LOG the target log object */ public static void startupShutdownMessage(Class<?> clazz, String[] args, final org.slf4j.Logger LOG) { final String hostname = getHostname(); final String classname = clazz.getSimpleName(); LOG.info( toStartupShutdownString("STARTUP_MSG: ", new String[] { "Starting " + classname, " host = " + hostname, " args = " + Arrays.asList(args), " version = " + HiveVersionInfo.getVersion(), " classpath = " + System.getProperty("java.class.path"), " build = " + HiveVersionInfo.getUrl() + " -r " + HiveVersionInfo.getRevision() + "; compiled by '" + HiveVersionInfo.getUser() + "' on " + HiveVersionInfo.getDate()} ) ); ShutdownHookManager.addShutdownHook( new Runnable() { @Override public void run() { LOG.info(toStartupShutdownString("SHUTDOWN_MSG: ", new String[]{ "Shutting down " + classname + " at " + hostname})); } }, SHUTDOWN_HOOK_PRIORITY); } /** * Return hostname without throwing exception. * @return hostname */ public static String getHostname() { try {return "" + InetAddress.getLocalHost();} catch(UnknownHostException uhe) {return "" + uhe;} } /** * The traditional binary prefixes, kilo, mega, ..., exa, * which can be represented by a 64-bit integer. * TraditionalBinaryPrefix symbol are case insensitive. 
*/ public static enum TraditionalBinaryPrefix { KILO(1024), MEGA(KILO.value << 10), GIGA(MEGA.value << 10), TERA(GIGA.value << 10), PETA(TERA.value << 10), EXA(PETA.value << 10); public final long value; public final char symbol; TraditionalBinaryPrefix(long value) { this.value = value; this.symbol = toString().charAt(0); } /** * @return The TraditionalBinaryPrefix object corresponding to the symbol. */ public static TraditionalBinaryPrefix valueOf(char symbol) { symbol = Character.toUpperCase(symbol); for(TraditionalBinaryPrefix prefix : TraditionalBinaryPrefix.values()) { if (symbol == prefix.symbol) { return prefix; } } throw new IllegalArgumentException("Unknown symbol '" + symbol + "'"); } /** * Convert a string to long. * The input string is first be trimmed * and then it is parsed with traditional binary prefix. * * For example, * "-1230k" will be converted to -1230 * 1024 = -1259520; * "891g" will be converted to 891 * 1024^3 = 956703965184; * * @param s input string * @return a long value represented by the input string. */ public static long string2long(String s) { s = s.trim(); final int lastpos = s.length() - 1; final char lastchar = s.charAt(lastpos); if (Character.isDigit(lastchar)) { return Long.parseLong(s); } else { long prefix; try { prefix = TraditionalBinaryPrefix.valueOf(lastchar).value; } catch (IllegalArgumentException e) { throw new IllegalArgumentException("Invalid size prefix '" + lastchar + "' in '" + s + "'. Allowed prefixes are k, m, g, t, p, e(case insensitive)"); } long num = Long.parseLong(s.substring(0, lastpos)); if (num > (Long.MAX_VALUE/prefix) || num < (Long.MIN_VALUE/prefix)) { throw new IllegalArgumentException(s + " does not fit in a Long"); } return num * prefix; } } } /** * Escapes HTML Special characters present in the string. 
* @param string * @return HTML Escaped String representation */ public static String escapeHTML(String string) { if(string == null) { return null; } StringBuilder sb = new StringBuilder(); boolean lastCharacterWasSpace = false; char[] chars = string.toCharArray(); for(char c : chars) { if(c == ' ') { if(lastCharacterWasSpace){ lastCharacterWasSpace = false; sb.append("&nbsp;"); }else { lastCharacterWasSpace=true; sb.append(" "); } }else { lastCharacterWasSpace = false; switch(c) { case '<': sb.append("&lt;"); break; case '>': sb.append("&gt;"); break; case '&': sb.append("&amp;"); break; case '"': sb.append("&quot;"); break; default : sb.append(c);break; } } } return sb.toString(); } /** * Return an abbreviated English-language desc of the byte length */ public static String byteDesc(long len) { double val = 0.0; String ending = ""; if (len < 1024 * 1024) { val = (1.0 * len) / 1024; ending = " KB"; } else if (len < 1024 * 1024 * 1024) { val = (1.0 * len) / (1024 * 1024); ending = " MB"; } else if (len < 1024L * 1024 * 1024 * 1024) { val = (1.0 * len) / (1024 * 1024 * 1024); ending = " GB"; } else if (len < 1024L * 1024 * 1024 * 1024 * 1024) { val = (1.0 * len) / (1024L * 1024 * 1024 * 1024); ending = " TB"; } else { val = (1.0 * len) / (1024L * 1024 * 1024 * 1024 * 1024); ending = " PB"; } return limitDecimalTo2(val) + ending; } public static synchronized String limitDecimalTo2(double d) { return decimalFormat.format(d); } /** * Concatenates strings, using a separator. * * @param separator Separator to join with. * @param strings Strings to join. */ public static String join(CharSequence separator, Iterable<?> strings) { Iterator<?> i = strings.iterator(); if (!i.hasNext()) { return ""; } StringBuilder sb = new StringBuilder(i.next().toString()); while (i.hasNext()) { sb.append(separator); sb.append(i.next().toString()); } return sb.toString(); } /** * Concatenates strings, using a separator. Empty/blank string or null will be * ignored. 
* * @param strings Strings to join. * @param separator Separator to join with. */ public static String joinIgnoringEmpty(String[] strings, char separator) { ArrayList<String> list = new ArrayList<String>(); for(String str : strings) { if (StringUtils.isNotBlank(str)) { list.add(str); } } return StringUtils.join(list, separator); } /** * Convert SOME_STUFF to SomeStuff * * @param s input string * @return camelized string */ public static String camelize(String s) { StringBuilder sb = new StringBuilder(); String[] words = split(s.toLowerCase(Locale.US), ESCAPE_CHAR, '_'); for (String word : words) { sb.append(StringUtils.capitalize(word)); } return sb.toString(); } /** * Checks if b is the first byte of a UTF-8 character. * */ public static boolean isUtfStartByte(byte b) { return (b & 0xC0) != 0x80; } public static int getTextUtfLength(Text t) { byte[] data = t.getBytes(); int len = 0; for (int i = 0; i < t.getLength(); i++) { if (isUtfStartByte(data[i])) { len++; } } return len; } /** * Checks if b is an ascii character */ public static boolean isAscii(byte b) { return (b & 0x80) == 0; } /** * Returns the number of leading whitespace characters in the utf-8 string */ public static int findLeadingSpaces(byte[] bytes, int start, int length) { int numSpaces; for (numSpaces = 0; numSpaces < length; ++numSpaces) { int curPos = start + numSpaces; if (isAscii(bytes[curPos]) && Character.isWhitespace(bytes[curPos])) { continue; } break; // non-space character } return (numSpaces - start); } /** * Returns the number of trailing whitespace characters in the utf-8 string */ public static int findTrailingSpaces(byte[] bytes, int start, int length) { int numSpaces; for (numSpaces = 0; numSpaces < length; ++numSpaces) { int curPos = start + (length - (numSpaces + 1)); if (isAscii(bytes[curPos]) && Character.isWhitespace(bytes[curPos])) { continue; } else { break; // non-space character } } return numSpaces; } /** * Finds trimmed length of utf-8 string */ public static int 
findTrimmedLength(byte[] bytes, int start, int length, int leadingSpaces) { int trailingSpaces = findTrailingSpaces(bytes, start, length); length = length - leadingSpaces; // If string is entirely whitespace, no need to apply trailingSpaces. if (length > 0) { length = length - trailingSpaces; } return length; } public static String normalizeIdentifier(String identifier) { return identifier.trim().toLowerCase(); } public static String quoteComments(String value) { char[] chars = value.toCharArray(); if (!commentProvided(chars)) { return null; } StringBuilder builder = new StringBuilder(); int prev = 0; for (int i = 0; i < chars.length; i++) { if (chars[i] == 0x00) { if (builder.length() > 0) { builder.append(','); } builder.append('\'').append(chars, prev, i - prev).append('\''); prev = i + 1; } } builder.append(",\'").append(chars, prev, chars.length - prev).append('\''); return builder.toString(); } public static boolean commentProvided(char[] chars) { for (char achar : chars) { if (achar != 0x00) { return true; } } return false; } public static String getPartitionValWithInvalidCharacter( List<String> partVals, Pattern partitionValidationPattern) { String result = null; if (partitionValidationPattern != null) { result = partVals.stream() .filter(partVal -> !partitionValidationPattern.matcher(partVal).matches()) .findFirst() .orElse(null); } return result; } /** * Strip comments from a sql statement, tracking when the statement contains a string literal. 
* * @param statement the input string * @return a stripped statement */ public static String removeComments(String statement) { if (statement == null) { return null; } Iterator<String> iterator = Splitter.on("\n").omitEmptyStrings().split(statement).iterator(); int[] startQuote = {-1}; StringBuilder ret = new StringBuilder(statement.length()); while (iterator.hasNext()) { String lineWithComments = iterator.next(); String lineNoComments = removeComments(lineWithComments, startQuote); ret.append(lineNoComments); if (iterator.hasNext() && !lineNoComments.isEmpty()) { ret.append("\n"); } } return ret.toString().trim(); } /** * Remove comments from the current line of a query. * Avoid removing comment-like strings inside quotes. * @param line a line of sql text * @param startQuote The value -1 indicates that line does not begin inside a string literal. * Other values indicate that line does begin inside a string literal * and the value passed is the delimiter character. * The array type is used to pass int type as input/output parameter. * @return the line with comments removed. */ public static String removeComments(String line, int[] startQuote) { if (line == null || line.isEmpty()) { return line; } if (startQuote[0] == -1 && isComment(line)) { return ""; //assume # can only be used at the beginning of line. } StringBuilder builder = new StringBuilder(); for (int index = 0; index < line.length();) { if (startQuote[0] == -1 && index < line.length() - 1 && line.charAt(index) == '-' && line.charAt(index + 1) == '-') { // Jump to the end of current line. When a multiple line query is executed with -e parameter, // it is passed in as one line string separated with '\n' for (; index < line.length() && line.charAt(index) != '\n'; ++index); continue; } char letter = line.charAt(index); if (startQuote[0] == letter && (index == 0 || line.charAt(index - 1) != '\\')) { startQuote[0] = -1; // Turn escape off. 
} else if (startQuote[0] == -1 && (letter == '\'' || letter == '"') && (index == 0 || line.charAt(index - 1) != '\\')) { startQuote[0] = letter; // Turn escape on. } builder.append(letter); index++; } return builder.toString(); } /** * Test whether a line is a comment. * * @param line the line to be tested * @return true if a comment */ private static boolean isComment(String line) { // SQL92 comment prefix is "--" // beeline also supports shell-style "#" prefix String lineTrimmed = line.trim(); return lineTrimmed.startsWith("#") || lineTrimmed.startsWith("--"); } }
apache/pulsar
37,076
pulsar-client-tools/src/main/java/org/apache/pulsar/admin/cli/CmdSinks.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.pulsar.admin.cli; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; import static org.apache.pulsar.common.naming.TopicName.DEFAULT_NAMESPACE; import static org.apache.pulsar.common.naming.TopicName.PUBLIC_TENANT; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.reflect.TypeToken; import java.io.File; import java.io.IOException; import java.lang.reflect.Field; import java.lang.reflect.Type; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.function.Supplier; import java.util.stream.Collectors; import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.text.WordUtils; import org.apache.pulsar.admin.cli.utils.CmdUtils; import org.apache.pulsar.client.admin.PulsarAdmin; import 
org.apache.pulsar.client.admin.PulsarAdminException; import org.apache.pulsar.client.api.SubscriptionInitialPosition; import org.apache.pulsar.common.functions.ConsumerConfig; import org.apache.pulsar.common.functions.FunctionConfig; import org.apache.pulsar.common.functions.Resources; import org.apache.pulsar.common.functions.UpdateOptionsImpl; import org.apache.pulsar.common.functions.Utils; import org.apache.pulsar.common.io.ConnectorDefinition; import org.apache.pulsar.common.io.SinkConfig; import org.apache.pulsar.common.util.ObjectMapperFactory; import picocli.CommandLine.Command; import picocli.CommandLine.Option; @Getter @Command(description = "Interface for managing Pulsar IO sinks (egress data from Pulsar)", aliases = "sink") @Slf4j public class CmdSinks extends CmdBase { private final CreateSink createSink; private final UpdateSink updateSink; private final DeleteSink deleteSink; private final ListSinks listSinks; private final GetSink getSink; private final GetSinkStatus getSinkStatus; private final StopSink stopSink; private final StartSink startSink; private final RestartSink restartSink; private final LocalSinkRunner localSinkRunner; public CmdSinks(Supplier<PulsarAdmin> admin) { super("sinks", admin); createSink = new CreateSink(); updateSink = new UpdateSink(); deleteSink = new DeleteSink(); listSinks = new ListSinks(); getSink = new GetSink(); getSinkStatus = new GetSinkStatus(); stopSink = new StopSink(); startSink = new StartSink(); restartSink = new RestartSink(); localSinkRunner = new LocalSinkRunner(); addCommand("create", createSink); addCommand("update", updateSink); addCommand("delete", deleteSink); addCommand("list", listSinks); addCommand("get", getSink); // TODO deprecate getstatus addCommand("status", getSinkStatus, "getstatus"); addCommand("stop", stopSink); addCommand("start", startSink); addCommand("restart", restartSink); addCommand("localrun", localSinkRunner); addCommand("available-sinks", new ListBuiltInSinks()); 
addCommand("reload", new ReloadBuiltInSinks()); } /** * Base command. */ @Getter abstract class BaseCommand extends CliCommand { @Override void run() throws Exception { processArguments(); runCmd(); } void processArguments() throws Exception { } abstract void runCmd() throws Exception; } @Command(description = "Run a Pulsar IO sink connector locally " + "(rather than deploying it to the Pulsar cluster)") protected class LocalSinkRunner extends CreateSink { @Option(names = "--state-storage-service-url", description = "The URL for the state storage service (the default is Apache BookKeeper)") protected String stateStorageServiceUrl; @Option(names = "--brokerServiceUrl", description = "The URL for the Pulsar broker", hidden = true) protected String deprecatedBrokerServiceUrl; @Option(names = "--broker-service-url", description = "The URL for the Pulsar broker") protected String brokerServiceUrl; @Option(names = "--clientAuthPlugin", description = "Client authentication plugin using " + "which function-process can connect to broker", hidden = true) protected String deprecatedClientAuthPlugin; @Option(names = "--client-auth-plugin", description = "Client authentication plugin using which function-process can connect to broker") protected String clientAuthPlugin; @Option(names = "--clientAuthParams", description = "Client authentication param", hidden = true) protected String deprecatedClientAuthParams; @Option(names = "--client-auth-params", description = "Client authentication param") protected String clientAuthParams; @Option(names = "--use_tls", description = "Use tls connection", hidden = true) protected Boolean deprecatedUseTls; @Option(names = "--use-tls", description = "Use tls connection") protected boolean useTls; @Option(names = "--tls_allow_insecure", description = "Allow insecure tls connection", hidden = true) protected Boolean deprecatedTlsAllowInsecureConnection; @Option(names = "--tls-allow-insecure", description = "Allow insecure tls connection") 
protected boolean tlsAllowInsecureConnection; @Option(names = "--hostname_verification_enabled", description = "Enable hostname verification", hidden = true) protected Boolean deprecatedTlsHostNameVerificationEnabled; @Option(names = "--hostname-verification-enabled", description = "Enable hostname verification") protected boolean tlsHostNameVerificationEnabled; @Option(names = "--tls_trust_cert_path", description = "tls trust cert file path", hidden = true) protected String deprecatedTlsTrustCertFilePath; @Option(names = "--tls-trust-cert-path", description = "tls trust cert file path") protected String tlsTrustCertFilePath; @Option(names = "--secrets-provider-classname", description = "Whats the classname for secrets provider") protected String secretsProviderClassName; @Option(names = "--secrets-provider-config", description = "Config that needs to be passed to secrets provider") protected String secretsProviderConfig; @Option(names = "--metrics-port-start", description = "The starting port range for metrics server") protected String metricsPortStart; private void mergeArgs() { if (isBlank(brokerServiceUrl) && !isBlank(deprecatedBrokerServiceUrl)) { brokerServiceUrl = deprecatedBrokerServiceUrl; } if (isBlank(clientAuthPlugin) && !isBlank(deprecatedClientAuthPlugin)) { clientAuthPlugin = deprecatedClientAuthPlugin; } if (isBlank(clientAuthParams) && !isBlank(deprecatedClientAuthParams)) { clientAuthParams = deprecatedClientAuthParams; } if (!useTls && deprecatedUseTls != null) { useTls = deprecatedUseTls; } if (!tlsAllowInsecureConnection && deprecatedTlsAllowInsecureConnection != null) { tlsAllowInsecureConnection = deprecatedTlsAllowInsecureConnection; } if (!tlsHostNameVerificationEnabled && deprecatedTlsHostNameVerificationEnabled != null) { tlsHostNameVerificationEnabled = deprecatedTlsHostNameVerificationEnabled; } if (isBlank(tlsTrustCertFilePath) && !isBlank(deprecatedTlsTrustCertFilePath)) { tlsTrustCertFilePath = deprecatedTlsTrustCertFilePath; } } 
@VisibleForTesting List<String> getLocalRunArgs() throws Exception { // merge deprecated args with new args mergeArgs(); List<String> localRunArgs = new LinkedList<>(); localRunArgs.add(System.getenv("PULSAR_HOME") + "/bin/function-localrunner"); localRunArgs.add("--sinkConfig"); localRunArgs.add(new Gson().toJson(sinkConfig)); for (Field field : this.getClass().getDeclaredFields()) { if (field.getName().toUpperCase().startsWith("DEPRECATED")) { continue; } if (field.getName().contains("$")) { continue; } Object value = field.get(this); if (value != null) { localRunArgs.add("--" + field.getName()); localRunArgs.add(value.toString()); } } return localRunArgs; } @Override public void runCmd() throws Exception { ProcessBuilder processBuilder = new ProcessBuilder(getLocalRunArgs()).inheritIO(); Process process = processBuilder.start(); process.waitFor(); } @Override protected String validateSinkType(String sinkType) { return sinkType; } } @Command(description = "Submit a Pulsar IO sink connector to run in a Pulsar cluster") protected class CreateSink extends SinkDetailsCommand { @Override void runCmd() throws Exception { if (Utils.isFunctionPackageUrlSupported(archive)) { getAdmin().sinks().createSinkWithUrl(sinkConfig, sinkConfig.getArchive()); } else { getAdmin().sinks().createSink(sinkConfig, sinkConfig.getArchive()); } print("Created successfully"); } } @Command(description = "Update a Pulsar IO sink connector") protected class UpdateSink extends SinkDetailsCommand { @Option(names = "--update-auth-data", description = "Whether or not to update the auth data") protected boolean updateAuthData; @Override void runCmd() throws Exception { UpdateOptionsImpl updateOptions = new UpdateOptionsImpl(); updateOptions.setUpdateAuthData(updateAuthData); if (Utils.isFunctionPackageUrlSupported(archive)) { getAdmin().sinks().updateSinkWithUrl(sinkConfig, sinkConfig.getArchive(), updateOptions); } else { getAdmin().sinks().updateSink(sinkConfig, sinkConfig.getArchive(), 
updateOptions); } print("Updated successfully"); } protected void validateSinkConfigs(SinkConfig sinkConfig) { if (sinkConfig.getTenant() == null) { sinkConfig.setTenant(PUBLIC_TENANT); } if (sinkConfig.getNamespace() == null) { sinkConfig.setNamespace(DEFAULT_NAMESPACE); } } } abstract class SinkDetailsCommand extends BaseCommand { @Option(names = "--tenant", description = "The sink's tenant") protected String tenant; @Option(names = "--namespace", description = "The sink's namespace") protected String namespace; @Option(names = "--name", description = "The sink's name") protected String name; @Option(names = { "-t", "--sink-type" }, description = "The sinks's connector provider") protected String sinkType; @Option(names = "--cleanup-subscription", description = "Whether delete the subscription " + "when sink is deleted") protected Boolean cleanupSubscription; @Option(names = { "-i", "--inputs" }, description = "The sink's input topic or topics " + "(multiple topics can be specified as a comma-separated list)") protected String inputs; @Option(names = "--topicsPattern", description = "TopicsPattern to consume from list of topics " + "under a namespace that match the pattern. [--input] and [--topicsPattern] are mutually exclusive. " + "Add SerDe class name for a pattern in --customSerdeInputs (supported for java fun only)", hidden = true) protected String deprecatedTopicsPattern; @Option(names = "--topics-pattern", description = "The topic pattern to consume from a list of topics " + "under a namespace that matches the pattern. [--input] and [--topics-pattern] are mutually " + "exclusive. 
Add SerDe class name for a pattern in --custom-serde-inputs") protected String topicsPattern; @Option(names = "--subsName", description = "Pulsar source subscription name " + "if user wants a specific subscription-name for input-topic consumer", hidden = true) protected String deprecatedSubsName; @Option(names = "--subs-name", description = "Pulsar source subscription name " + "if user wants a specific subscription-name for input-topic consumer") protected String subsName; @Option(names = "--subs-position", description = "Pulsar source subscription position " + "if user wants to consume messages from the specified location") protected SubscriptionInitialPosition subsPosition; @Option(names = "--customSerdeInputs", description = "The map of input topics to SerDe class names (as a JSON string)", hidden = true) protected String deprecatedCustomSerdeInputString; @Option(names = "--custom-serde-inputs", description = "The map of input topics to SerDe class names (as a JSON string)") protected String customSerdeInputString; @Option(names = "--custom-schema-inputs", description = "The map of input topics to Schema types or class names (as a JSON string)") protected String customSchemaInputString; @Option(names = "--input-specs", description = "The map of inputs to custom configuration (as a JSON string)") protected String inputSpecs; @Option(names = "--max-redeliver-count", description = "Maximum number of times that a message " + "will be redelivered before being sent to the dead letter queue") protected Integer maxMessageRetries; @Option(names = "--dead-letter-topic", description = "Name of the dead topic where the failing messages will be sent.") protected String deadLetterTopic; @Option(names = "--processingGuarantees", description = "The processing guarantees (aka delivery semantics) applied to the sink", hidden = true) protected FunctionConfig.ProcessingGuarantees deprecatedProcessingGuarantees; @Option(names = "--processing-guarantees", description = "The 
processing guarantees (as known as delivery semantics) applied to the sink." + " The '--processing-guarantees' implementation in Pulsar also relies on sink implementation." + " The available values are `ATLEAST_ONCE`, `ATMOST_ONCE`, `EFFECTIVELY_ONCE`." + " If it is not specified, `ATLEAST_ONCE` delivery guarantee is used.") protected FunctionConfig.ProcessingGuarantees processingGuarantees; @Option(names = "--retainOrdering", description = "Sink consumes and sinks messages in order", hidden = true) protected Boolean deprecatedRetainOrdering; @Option(names = "--retain-ordering", description = "Sink consumes and sinks messages in order") protected Boolean retainOrdering; @Option(names = "--parallelism", description = "The sink's parallelism factor (i.e. the number of sink instances to run)") protected Integer parallelism; @Option(names = "--retain-key-ordering", description = "Sink consumes and processes messages in key order") protected Boolean retainKeyOrdering; @Option(names = {"-a", "--archive"}, description = "Path to the archive file for the sink. 
It also supports " + "url-path [http/https/file (file protocol assumes that file already exists on worker host)] from " + "which worker can download the package.") protected String archive; @Option(names = "--className", description = "The sink's class name if archive is file-url-path (file://)", hidden = true) protected String deprecatedClassName; @Option(names = "--classname", description = "The sink's class name if archive is file-url-path (file://)") protected String className; @Option(names = "--sinkConfigFile", description = "The path to a YAML config file specifying the " + "sink's configuration", hidden = true) protected String deprecatedSinkConfigFile; @Option(names = "--sink-config-file", description = "The path to a YAML config file specifying the " + "sink's configuration") protected String sinkConfigFile; @Option(names = "--cpu", description = "The CPU (in cores) that needs to be allocated " + "per sink instance (applicable only to Docker runtime)") protected Double cpu; @Option(names = "--ram", description = "The RAM (in bytes) that need to be allocated " + "per sink instance (applicable only to the process and Docker runtimes)") protected Long ram; @Option(names = "--disk", description = "The disk (in bytes) that need to be allocated " + "per sink instance (applicable only to Docker runtime)") protected Long disk; @Option(names = "--sinkConfig", description = "User defined configs key/values", hidden = true) protected String deprecatedSinkConfigString; @Option(names = "--sink-config", description = "User defined configs key/values") protected String sinkConfigString; @Option(names = "--auto-ack", description = "Whether or not the framework will automatically acknowledge messages", arity = "1") protected Boolean autoAck; @Option(names = "--timeout-ms", description = "The message timeout in milliseconds") protected Long timeoutMs; @Option(names = "--negative-ack-redelivery-delay-ms", description = "The negative ack message redelivery delay in 
milliseconds") protected Long negativeAckRedeliveryDelayMs; @Option(names = "--custom-runtime-options", description = "A string that encodes options to " + "customize the runtime, see docs for configured runtime for details") protected String customRuntimeOptions; @Option(names = "--secrets", description = "The map of secretName to an object that encapsulates " + "how the secret is fetched by the underlying secrets provider") protected String secretsString; @Option(names = "--transform-function", description = "Transform function applied before the Sink") protected String transformFunction; @Option(names = "--transform-function-classname", description = "The transform function class name") protected String transformFunctionClassName; @Option(names = "--transform-function-config", description = "Configuration of the transform function " + "applied before the Sink") protected String transformFunctionConfig; @Option(names = "--log-topic", description = "The topic to which the logs of a Pulsar Sink are produced") protected String logTopic; @Option(names = "--runtime-flags", description = "Any flags that you want to pass to a runtime" + " (for process & Kubernetes runtime only).") protected String runtimeFlags; protected SinkConfig sinkConfig; private void mergeArgs() { if (isBlank(subsName) && !isBlank(deprecatedSubsName)) { subsName = deprecatedSubsName; } if (isBlank(topicsPattern) && !isBlank(deprecatedTopicsPattern)) { topicsPattern = deprecatedTopicsPattern; } if (isBlank(customSerdeInputString) && !isBlank(deprecatedCustomSerdeInputString)) { customSerdeInputString = deprecatedCustomSerdeInputString; } if (processingGuarantees == null && deprecatedProcessingGuarantees != null) { processingGuarantees = deprecatedProcessingGuarantees; } if (retainOrdering == null && deprecatedRetainOrdering != null) { retainOrdering = deprecatedRetainOrdering; } if (isBlank(className) && !isBlank(deprecatedClassName)) { className = deprecatedClassName; } if (isBlank(sinkConfigFile) 
&& !isBlank(deprecatedSinkConfigFile)) { sinkConfigFile = deprecatedSinkConfigFile; } if (isBlank(sinkConfigString) && !isBlank(deprecatedSinkConfigString)) { sinkConfigString = deprecatedSinkConfigString; } } @Override void processArguments() throws Exception { super.processArguments(); // merge deprecated args with new args mergeArgs(); if (null != sinkConfigFile) { this.sinkConfig = CmdUtils.loadConfig(sinkConfigFile, SinkConfig.class); } else { this.sinkConfig = new SinkConfig(); } if (null != tenant) { sinkConfig.setTenant(tenant); } if (null != namespace) { sinkConfig.setNamespace(namespace); } if (null != className) { sinkConfig.setClassName(className); } if (null != name) { sinkConfig.setName(name); } if (null != processingGuarantees) { sinkConfig.setProcessingGuarantees(processingGuarantees); } if (null != cleanupSubscription) { sinkConfig.setCleanupSubscription(cleanupSubscription); } if (retainOrdering != null) { sinkConfig.setRetainOrdering(retainOrdering); } if (retainKeyOrdering != null) { sinkConfig.setRetainKeyOrdering(retainKeyOrdering); } if (null != inputs) { sinkConfig.setInputs(Arrays.asList(inputs.split(","))); } if (null != customSerdeInputString) { Type type = new TypeToken<Map<String, String>>(){}.getType(); Map<String, String> customSerdeInputMap = new Gson().fromJson(customSerdeInputString, type); sinkConfig.setTopicToSerdeClassName(customSerdeInputMap); } if (null != customSchemaInputString) { Type type = new TypeToken<Map<String, String>>(){}.getType(); Map<String, String> customSchemaInputMap = new Gson().fromJson(customSchemaInputString, type); sinkConfig.setTopicToSchemaType(customSchemaInputMap); } if (null != inputSpecs) { Type type = new TypeToken<Map<String, ConsumerConfig>>(){}.getType(); sinkConfig.setInputSpecs(new Gson().fromJson(inputSpecs, type)); } sinkConfig.setMaxMessageRetries(maxMessageRetries); if (null != deadLetterTopic) { sinkConfig.setDeadLetterTopic(deadLetterTopic); } if (isNotBlank(subsName)) { 
sinkConfig.setSourceSubscriptionName(subsName); } if (null != subsPosition) { sinkConfig.setSourceSubscriptionPosition(subsPosition); } if (null != topicsPattern) { sinkConfig.setTopicsPattern(topicsPattern); } if (parallelism != null) { sinkConfig.setParallelism(parallelism); } if (archive != null && (sinkType != null || sinkConfig.getSinkType() != null)) { throw new ParameterException("Cannot specify both archive and sink-type"); } if (null != archive) { sinkConfig.setArchive(archive); } if (sinkType != null) { sinkConfig.setArchive(validateSinkType(sinkType)); } else if (sinkConfig.getSinkType() != null) { sinkConfig.setArchive(validateSinkType(sinkConfig.getSinkType())); } Resources resources = sinkConfig.getResources(); if (cpu != null) { if (resources == null) { resources = new Resources(); } resources.setCpu(cpu); } if (ram != null) { if (resources == null) { resources = new Resources(); } resources.setRam(ram); } if (disk != null) { if (resources == null) { resources = new Resources(); } resources.setDisk(disk); } if (resources != null) { sinkConfig.setResources(resources); } try { if (null != sinkConfigString) { sinkConfig.setConfigs(parseConfigs(sinkConfigString)); } } catch (Exception ex) { throw new IllegalArgumentException("Cannot parse sink-config", ex); } if (autoAck != null) { sinkConfig.setAutoAck(autoAck); } if (timeoutMs != null) { sinkConfig.setTimeoutMs(timeoutMs); } if (negativeAckRedeliveryDelayMs != null && negativeAckRedeliveryDelayMs > 0) { sinkConfig.setNegativeAckRedeliveryDelayMs(negativeAckRedeliveryDelayMs); } if (customRuntimeOptions != null) { sinkConfig.setCustomRuntimeOptions(customRuntimeOptions); } if (secretsString != null) { Type type = new TypeToken<Map<String, Object>>() {}.getType(); Map<String, Object> secretsMap = new Gson().fromJson(secretsString, type); if (secretsMap == null) { secretsMap = Collections.emptyMap(); } sinkConfig.setSecrets(secretsMap); } if (transformFunction != null) { 
sinkConfig.setTransformFunction(transformFunction); } if (transformFunctionClassName != null) { sinkConfig.setTransformFunctionClassName(transformFunctionClassName); } if (transformFunctionConfig != null) { sinkConfig.setTransformFunctionConfig(transformFunctionConfig); } if (null != logTopic) { sinkConfig.setLogTopic(logTopic); } if (null != runtimeFlags) { sinkConfig.setRuntimeFlags(runtimeFlags); } // check if configs are valid validateSinkConfigs(sinkConfig); } protected Map<String, Object> parseConfigs(String str) throws JsonProcessingException { ObjectMapper mapper = ObjectMapperFactory.getMapper().getObjectMapper(); TypeReference<HashMap<String, Object>> typeRef = new TypeReference<HashMap<String, Object>>() {}; return mapper.readValue(str, typeRef); } protected void validateSinkConfigs(SinkConfig sinkConfig) { if (isBlank(sinkConfig.getArchive())) { throw new ParameterException("Sink archive not specified"); } org.apache.pulsar.common.functions.Utils.inferMissingArguments(sinkConfig); if (!Utils.isFunctionPackageUrlSupported(sinkConfig.getArchive()) && !sinkConfig.getArchive().startsWith(Utils.BUILTIN)) { if (!new File(sinkConfig.getArchive()).exists()) { throw new IllegalArgumentException(String.format("Sink Archive file %s does not exist", sinkConfig.getArchive())); } } } protected String validateSinkType(String sinkType) throws IOException { Set<String> availableSinks; try { availableSinks = getAdmin().sinks().getBuiltInSinks().stream() .map(ConnectorDefinition::getName).collect(Collectors.toSet()); } catch (PulsarAdminException e) { throw new IOException(e); } if (!availableSinks.contains(sinkType)) { throw new ParameterException( "Invalid sink type '" + sinkType + "' -- Available sinks are: " + availableSinks); } // Source type is a valid built-in connector type return "builtin://" + sinkType; } } /** * Sink level command. 
*/ @Getter abstract class SinkCommand extends BaseCommand { @Option(names = "--tenant", description = "The sink's tenant") protected String tenant; @Option(names = "--namespace", description = "The sink's namespace") protected String namespace; @Option(names = "--name", description = "The sink's name") protected String sinkName; @Override void processArguments() throws Exception { super.processArguments(); if (tenant == null) { tenant = PUBLIC_TENANT; } if (namespace == null) { namespace = DEFAULT_NAMESPACE; } if (null == sinkName) { throw new RuntimeException( "You must specify a name for the sink"); } } } @Command(description = "Stops a Pulsar IO sink connector") protected class DeleteSink extends SinkCommand { @Override void runCmd() throws Exception { getAdmin().sinks().deleteSink(tenant, namespace, sinkName); print("Deleted successfully"); } } @Command(description = "Gets the information about a Pulsar IO sink connector") protected class GetSink extends SinkCommand { @Override void runCmd() throws Exception { SinkConfig sinkConfig = getAdmin().sinks().getSink(tenant, namespace, sinkName); Gson gson = new GsonBuilder().setPrettyPrinting().create(); System.out.println(gson.toJson(sinkConfig)); } } /** * List Sources command. 
*/ @Command(description = "List all running Pulsar IO sink connectors") protected class ListSinks extends BaseCommand { @Option(names = "--tenant", description = "The sink's tenant") protected String tenant; @Option(names = "--namespace", description = "The sink's namespace") protected String namespace; @Override public void processArguments() { if (tenant == null) { tenant = PUBLIC_TENANT; } if (namespace == null) { namespace = DEFAULT_NAMESPACE; } } @Override void runCmd() throws Exception { List<String> sinks = getAdmin().sinks().listSinks(tenant, namespace); Gson gson = new GsonBuilder().setPrettyPrinting().create(); System.out.println(gson.toJson(sinks)); } } @Command(description = "Check the current status of a Pulsar Sink") class GetSinkStatus extends SinkCommand { @Option(names = "--instance-id", description = "The sink instanceId (Get-status of all instances if instance-id is not provided") protected String instanceId; @Override void runCmd() throws Exception { if (isBlank(instanceId)) { print(getAdmin().sinks().getSinkStatus(tenant, namespace, sinkName)); } else { print(getAdmin().sinks().getSinkStatus(tenant, namespace, sinkName, Integer.parseInt(instanceId))); } } } @Command(description = "Restart sink instance") class RestartSink extends SinkCommand { @Option(names = "--instance-id", description = "The sink instanceId (restart all instances if instance-id is not provided") protected String instanceId; @Override void runCmd() throws Exception { if (isNotBlank(instanceId)) { try { getAdmin().sinks().restartSink(tenant, namespace, sinkName, Integer.parseInt(instanceId)); } catch (NumberFormatException e) { System.err.println("instance-id must be a number"); } } else { getAdmin().sinks().restartSink(tenant, namespace, sinkName); } System.out.println("Restarted successfully"); } } @Command(description = "Stops sink instance") class StopSink extends SinkCommand { @Option(names = "--instance-id", description = "The sink instanceId (stop all instances if 
instance-id is not provided") protected String instanceId; @Override void runCmd() throws Exception { if (isNotBlank(instanceId)) { try { getAdmin().sinks().stopSink(tenant, namespace, sinkName, Integer.parseInt(instanceId)); } catch (NumberFormatException e) { System.err.println("instance-id must be a number"); } } else { getAdmin().sinks().stopSink(tenant, namespace, sinkName); } System.out.println("Stopped successfully"); } } @Command(description = "Starts sink instance") class StartSink extends SinkCommand { @Option(names = "--instance-id", description = "The sink instanceId (start all instances if instance-id is not provided") protected String instanceId; @Override void runCmd() throws Exception { if (isNotBlank(instanceId)) { try { getAdmin().sinks().startSink(tenant, namespace, sinkName, Integer.parseInt(instanceId)); } catch (NumberFormatException e) { System.err.println("instance-id must be a number"); } } else { getAdmin().sinks().startSink(tenant, namespace, sinkName); } System.out.println("Started successfully"); } } @Command(description = "Get the list of Pulsar IO connector sinks supported by Pulsar cluster") public class ListBuiltInSinks extends BaseCommand { @Override void runCmd() throws Exception { getAdmin().sinks().getBuiltInSinks().stream().filter(x -> isNotBlank(x.getSinkClass())) .forEach(connector -> { System.out.println(connector.getName()); System.out.println(WordUtils.wrap(connector.getDescription(), 80)); System.out.println("----------------------------------------"); }); } } @Command(description = "Reload the available built-in connectors") public class ReloadBuiltInSinks extends BaseCommand { @Override void runCmd() throws Exception { getAdmin().sinks().reloadBuiltInSinks(); } } }
google/sagetv
36,966
java/sage/DShowTVPlayer.java
/* * Copyright 2015 The SageTV Authors. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package sage; public class DShowTVPlayer extends DShowMediaPlayer { protected static final String PS_HW_DECODER_FILTER = "ps_hw_decoder_filter"; protected static final String DISABLE_SW_DECODING = "disable_sw_decoding"; protected static final String DXVA_MPEG_MODE = "dxva_mpeg_mode"; protected static final String FORCE_DEINTERLACE = "force_deinterlace"; private static final String USE_DSCALER = "use_dscaler"; private static final String DSCALER_MODE = "dscaler_mode"; private static final String DSCALER_DOUBLE_REFRESH = "dscaler_double_refresh"; private static final String DSCALER_ODD_FIELD_FIRST = "dscaler_odd_field_first"; private static final String DSCALER_PLUGIN = "dscaler_plugin"; private static final String DISABLE_ELECARD_DEINTERLACING = "disable_elecard_deinterlacing"; protected static final String DSCALER_GUID = "{437B0D3A-4689-4FA6-A7DD-EB4928203C2F}"; public static final int SAGE_DXVA_MPEGA = 1; public static final int SAGE_DXVA_MPEGB = 2; public static final int SAGE_DXVA_MPEGC = 3; public static final int SAGE_DXVA_MPEGD = 4; public static final String[] DXVA_MPEG_MODE_NAMES = {Sage.rez("Default"), "A","B","C","D"}; public static String getDXVAName(int x) { switch (x) { case SAGE_DXVA_MPEGA: return "A"; case SAGE_DXVA_MPEGB: return "B"; case SAGE_DXVA_MPEGC: return "C"; case SAGE_DXVA_MPEGD: return "D"; } return Sage.rez("Default"); } public static int 
getDXVACode(String x) { if ("A".equals(x)) return SAGE_DXVA_MPEGA; else if ("B".equals(x)) return SAGE_DXVA_MPEGB; else if ("C".equals(x)) return SAGE_DXVA_MPEGC; else if ("D".equals(x)) return SAGE_DXVA_MPEGD; else return 0; } public static final int DSCALER_MODE_WEAVE = 1020; public static final int DSCALER_MODE_TWO_FRAME = 1022; public static final int DSCALER_MODE_FIELD_BOB = 1024; public static final int DSCALER_MODE_BOB = 1021; public static final int DSCALER_MODE_BLENDED_CLIPPING = 1023; public static final int DSCALER_MODE_PLUGIN = 1025; public static final String[] DSCALER_MODE_NAMES = { Sage.rez("Blended_Clipping"), Sage.rez("Bob"), Sage.rez("Field_Bob"), Sage.rez("Plugin"), Sage.rez("Two_Frame"), Sage.rez("Weave")}; public static String getDscalerName(int x) { switch (x) { case DSCALER_MODE_WEAVE: return Sage.rez("Weave"); case DSCALER_MODE_TWO_FRAME: return Sage.rez("Two_Frame"); case DSCALER_MODE_FIELD_BOB: return Sage.rez("Field_Bob"); case DSCALER_MODE_BOB: return Sage.rez("Bob"); case DSCALER_MODE_BLENDED_CLIPPING: return Sage.rez("Blended_Clipping"); case DSCALER_MODE_PLUGIN: return Sage.rez("Plugin"); } return ""; } public static int getDscalerCode(String x) { if (Sage.rez("Weave").equals(x)) return DSCALER_MODE_WEAVE; else if (Sage.rez("Two_Frame").equals(x)) return DSCALER_MODE_TWO_FRAME; else if (Sage.rez("Field_Bob").equals(x)) return DSCALER_MODE_FIELD_BOB; else if (Sage.rez("Bob").equals(x)) return DSCALER_MODE_BOB; else if (Sage.rez("Blended_Clipping").equals(x)) return DSCALER_MODE_BLENDED_CLIPPING; else if (Sage.rez("Plugin").equals(x)) return DSCALER_MODE_PLUGIN; else return 0; } public static final int SAGE_DEINTERLACE_BOB = 1; public static final int SAGE_DEINTERLACE_WEAVE = 2; public static final int SAGE_DEINTERLACE_BOBWEAVE = 3; public static final String[] DEINTERLACE_NAMES = { Sage.rez("Default"), Sage.rez("Bob"), Sage.rez("Weave"), Sage.rez("Bob_and_Weave")}; public static String getDeinterlaceName(int x) { if (x == 
SAGE_DEINTERLACE_BOB) return Sage.rez("Bob"); else if (x == SAGE_DEINTERLACE_WEAVE) return Sage.rez("Weave"); else if (x == SAGE_DEINTERLACE_BOBWEAVE) return Sage.rez("Bob_and_Weave"); else return Sage.rez("Default"); } public static int getDeinterlaceCode(String x) { if (Sage.rez("Weave").equals(x)) return SAGE_DEINTERLACE_WEAVE; else if (Sage.rez("Bob").equals(x)) return SAGE_DEINTERLACE_BOB; else if (Sage.rez("Bob_and_Weave").equals(x)) return SAGE_DEINTERLACE_BOBWEAVE; else return 0; } public static boolean getDisableSWDecoding() { return Sage.getBoolean(PREFS_ROOT + DISABLE_SW_DECODING, false); } public static void setDisableSWDecoding(boolean x) { Sage.putBoolean(PREFS_ROOT + DISABLE_SW_DECODING, x); } public static boolean getUseDscaler() { return Sage.getBoolean(PREFS_ROOT + USE_DSCALER, false); } public static void setUseDscaler(boolean x) { Sage.putBoolean(PREFS_ROOT + USE_DSCALER, x); } public static int getForceDeinterlace() { return Sage.getInt(PREFS_ROOT + FORCE_DEINTERLACE, 0); } public static void setForceDeinterlace(int x) { Sage.putInt(PREFS_ROOT + FORCE_DEINTERLACE, x); } public static int getDxvaMpegMode() { return Sage.getInt(PREFS_ROOT + DXVA_MPEG_MODE, 0); } public static void setDxvaMpegMode(int x) { Sage.putInt(PREFS_ROOT + DXVA_MPEG_MODE, x); } public static int getDscalerMode() { return Sage.getInt(PREFS_ROOT + DSCALER_MODE, 0); } public static void setDscalerMode(int x) { Sage.putInt(PREFS_ROOT + DSCALER_MODE, x); } public static boolean getDscalerDoubleRefresh() { return Sage.getBoolean(PREFS_ROOT + DSCALER_DOUBLE_REFRESH, false); } public static void setDscalerDoubleRefresh(boolean x){ Sage.putBoolean(PREFS_ROOT + DSCALER_DOUBLE_REFRESH, x); } public static boolean getDscalerOddFieldFirst() { return Sage.getBoolean(PREFS_ROOT + DSCALER_ODD_FIELD_FIRST, false); } public static void setDscalerOddFieldFirst(boolean x){ Sage.putBoolean(PREFS_ROOT + DSCALER_ODD_FIELD_FIRST, x); } public static String getDscalerPlugin() { return 
Sage.get(PREFS_ROOT + DSCALER_PLUGIN, ""); } public static void setDscalerPlugin(String x){ Sage.put(PREFS_ROOT + DSCALER_PLUGIN, x); } public static String[] getDscalerPlugins() { if (!Sage.WINDOWS_OS) return new String[0]; // These all start with DI_ and end with .dll and are in the IRTunerPlugins directory String irPluginDir = Sage.readStringValue(Sage.HKEY_LOCAL_MACHINE, "SOFTWARE\\Frey Technologies\\Common", "IRTunerPluginsDir"); if (irPluginDir == null) irPluginDir = System.getProperty("user.dir"); String[] suspectDLLFiles = new java.io.File(irPluginDir). list(new java.io.FilenameFilter(){ public boolean accept(java.io.File dir,String filename){return filename.toLowerCase().endsWith(".dll") && filename.startsWith("DI_");}}); String[] pluginDLLs = (suspectDLLFiles == null) ? new String[0] : new String[suspectDLLFiles.length]; for (int i = 0; i < pluginDLLs.length; i++) pluginDLLs[i] = suspectDLLFiles[i].substring(3, suspectDLLFiles[i].length() - 4); return pluginDLLs; } public static boolean hasPVR350HWDecoder() { if (!Sage.WINDOWS_OS) return false; String[] devs = DShowCaptureDevice.getDevicesInCategory0(DShowCaptureManager.HW_DECODER_CATEGORY_GUID); for (int i = 0; i < devs.length; i++) if (devs[i].indexOf("PVR") != -1) return true; return false; } public static boolean getEnableHWDecoder() { return Sage.get(PREFS_ROOT + PS_HW_DECODER_FILTER, "").length() > 0; } public static void setEnableHWDecoder(boolean x) { if (x && Sage.WINDOWS_OS) { String[] devs = DShowCaptureDevice.getDevicesInCategory0(DShowCaptureManager.HW_DECODER_CATEGORY_GUID); for (int i = 0; i < devs.length; i++) { if (devs[i].indexOf("PVR") != -1) { Sage.put(PREFS_ROOT + PS_HW_DECODER_FILTER, devs[i]); return; } } if (devs.length > 0) { Sage.put(PREFS_ROOT + PS_HW_DECODER_FILTER, devs[0]); return; } } Sage.put(PREFS_ROOT + PS_HW_DECODER_FILTER, ""); } public static String getAudioDecoderFilter() { return Sage.get(PREFS_ROOT + AUDIO_DECODER_FILTER, "SageTV MPEG Audio Decoder"); } public 
static void setAudioDecoderFilter(String x) { Sage.put(PREFS_ROOT + AUDIO_DECODER_FILTER, x); } public static String getVideoDecoderFilter() { return Sage.get(PREFS_ROOT + VIDEO_DECODER_FILTER, "SageTV MPEG Video Decoder"); } public static void setVideoDecoderFilter(String x) { Sage.put(PREFS_ROOT + VIDEO_DECODER_FILTER, x); } public static void autoOptimize(boolean onlyOnVirgins) { if (!Sage.WINDOWS_OS) return; String[] dshowFilters = DShowCaptureDevice.getDevicesInCategory0(DShowCaptureManager.FILTERS_CATEGORY_GUID); if (dshowFilters.length != 0) java.util.Arrays.sort(dshowFilters, filterNameCompare); if (dshowFilters.length != 0 && (!onlyOnVirgins || (Sage.get(PREFS_ROOT + AUDIO_DECODER_FILTER, "SageTV MPEG Audio Decoder").length() == 0 && Sage.get(PREFS_ROOT + VIDEO_DECODER_FILTER, "SageTV MPEG Video Decoder").length() == 0))) { if (Sage.DBG) System.out.println("Player AUTO OPTIMIZATION IS RUNNING..."); if (Sage.DBG) System.out.println("Searching for existence of SageTV decoders..."); if (java.util.Arrays.binarySearch(dshowFilters, "SageTV MPEG Video Decoder", filterNameCompare) >= 0 && java.util.Arrays.binarySearch(dshowFilters, "SageTV MPEG Audio Decoder", filterNameCompare) >= 0) { if (Sage.DBG) System.out.println("Default SageTV configuration will be used"); setVideoDecoderFilter("SageTV MPEG Video Decoder"); setAudioDecoderFilter("SageTV MPEG Audio Decoder"); // setUseOverlay(true); setAudioRenderFilter(Sage.rez("Default")); return; } if (Sage.DBG) System.out.println("Searching for existence of LAV decoders..."); if (java.util.Arrays.binarySearch(dshowFilters, "LAV Video Decoder", filterNameCompare) >= 0 && java.util.Arrays.binarySearch(dshowFilters, "LAV Audio Decoder", filterNameCompare) >= 0) { if (Sage.DBG) System.out.println("Default LAV configuration will be used"); setVideoDecoderFilter("LAV Video Decoder"); setAudioDecoderFilter("LAV Audio Decoder"); // setUseOverlay(true); setAudioRenderFilter(Sage.rez("Default")); return; } if (Sage.DBG) 
System.out.println("Searching for existence of Hauppauge card & Intervideo decoders..."); if (java.util.Arrays.binarySearch(dshowFilters, "InterVideo NonCSS Video Decoder for Hauppauge", filterNameCompare) >= 0 && java.util.Arrays.binarySearch(dshowFilters, "InterVideo NonCSS Audio Decoder for Hauppauge", filterNameCompare) >= 0) { if (Sage.DBG) System.out.println("Default Hauppauge/Intervideo configuration will be used"); setVideoDecoderFilter("InterVideo NonCSS Video Decoder for Hauppauge"); setAudioDecoderFilter("InterVideo NonCSS Audio Decoder for Hauppauge"); // setUseOverlay(true); setAudioRenderFilter(Sage.rez("Default")); return; } if (Sage.DBG) System.out.println("Searching for existence of Elecard Video Decoder..."); if (java.util.Arrays.binarySearch(dshowFilters, "Elecard MPEG2 Video Decoder", filterNameCompare) >= 0) { if (Sage.DBG) System.out.println("Elecard filter detected, using Elecard/DScaler combination for playback"); setVideoDecoderFilter("Elecard MPEG2 Video Decoder"); if (java.util.Arrays.binarySearch(dshowFilters, "MPEG Audio Decoder", filterNameCompare) >= 0) { if (Sage.DBG) System.out.println("Using the system MPEG Audio Decoder"); setAudioDecoderFilter("MPEG Audio Decoder"); } /*else if (java.util.Arrays.binarySearch(dshowFilters, "Moonlight Odio Dekoda", filterNameCompare) >= 0) { if (Sage.DBG) System.out.println("Using the Moonlight Odio Dekoda"); setAudioDecoderFilter("Moonlight Odio Dekoda"); } else { if (Sage.DBG) System.out.println("No desirable MPEG audio decoder found, defaulting it"); setAudioDecoderFilter(Sage.rez("Default")); } setUseOverlay(true);*/ setUseDscaler(true); setDscalerMode(DSCALER_MODE_BOB); setDscalerDoubleRefresh(false); setDscalerOddFieldFirst(false); setAudioRenderFilter(Sage.rez("Default")); return; } if (Sage.DBG) System.out.println("Searching for existence of Sonic Decoders..."); if (java.util.Arrays.binarySearch(dshowFilters, "Sonic Cinemaster@ DS Video Decoder", filterNameCompare) >= 0 && 
java.util.Arrays.binarySearch(dshowFilters, "Sonic Cinemaster@ DS Audio Decoder", filterNameCompare) >= 0) { if (Sage.DBG) System.out.println("Sonic filters detected and being used"); setVideoDecoderFilter("Sonic Cinemaster@ DS Video Decoder"); setAudioDecoderFilter("Sonic Cinemaster@ DS Audio Decoder"); // setUseOverlay(true); setAudioRenderFilter(Sage.rez("Default")); return; } if (Sage.DBG) System.out.println("Searching for existence of Ravisent Decoders..."); if (java.util.Arrays.binarySearch(dshowFilters, "RAVISENT Cinemaster DS Video Decoder", filterNameCompare) >= 0 && java.util.Arrays.binarySearch(dshowFilters, "RAVISENT Cinemaster DS Audio Decoder", filterNameCompare) >= 0) { if (Sage.DBG) System.out.println("Ravisent filters detected and being used"); setVideoDecoderFilter("RAVISENT Cinemaster DS Video Decoder"); setAudioDecoderFilter("RAVISENT Cinemaster DS Audio Decoder"); // setUseOverlay(true); setAudioRenderFilter(Sage.rez("Default")); return; } if (Sage.DBG) System.out.println("Searching for existence of Intervideo Decoders..."); if (java.util.Arrays.binarySearch(dshowFilters, "InterVideo Video Decoder", filterNameCompare) >= 0 && java.util.Arrays.binarySearch(dshowFilters, "InterVideo Audio Decoder", filterNameCompare) >= 0) { if (Sage.DBG) System.out.println("Intervideo filters detected and being used"); setVideoDecoderFilter("InterVideo Video Decoder"); setAudioDecoderFilter("InterVideo Audio Decoder"); // setUseOverlay(true); setAudioRenderFilter(Sage.rez("Default")); return; } if (Sage.DBG) System.out.println("Searching for existence of Cyberlink Decoders..."); if (java.util.Arrays.binarySearch(dshowFilters, "Cyberlink Video/SP Decoder", filterNameCompare) >= 0 && java.util.Arrays.binarySearch(dshowFilters, "Cyberlink Audio Decoder", filterNameCompare) >= 0) { if (Sage.DBG) System.out.println("Cyberlink filters detected and being used"); setVideoDecoderFilter("Cyberlink Video/SP Decoder"); setAudioDecoderFilter("Cyberlink Audio Decoder"); // 
setUseOverlay(true); setAudioRenderFilter(Sage.rez("Default")); return; } if (Sage.DBG) System.out.println("Nothing suitable found for MPEG2, using system defaults"); setVideoDecoderFilter(Sage.rez("Default")); setAudioDecoderFilter(Sage.rez("Default")); // setUseOverlay(true); setAudioRenderFilter(Sage.rez("Default")); } } public DShowTVPlayer() { super(); } public boolean canFastLoad(byte majorTypeHint, byte minorTypeHint, String encodingHint, java.io.File file) { // Type checking is handled in VideoFrame so we can safely accept this whenever its presented to us return true;/*currHintEncoding != null && currHintEncoding.length() != 0 && encodingHint != null && encodingHint.length() != 0 && encodingHint.equals(currHintEncoding) && majorTypeHint == currHintMajorType && minorTypeHint == currHintMinorType && currHintMinorType != MediaFile.MEDIASUBTYPE_MPEG2_TS;*/ // We can't fast switch on TS currently because it uses a different demux which doesn't support it } public int getPlaybackCaps() { return super.getPlaybackCaps() | TIMESHIFT_CAP; } public synchronized void fastLoad(byte majorTypeHint, byte minorTypeHint, String encodingHint, java.io.File file, String hostname, boolean timeshifted, long bufferSize, boolean waitUntilDone) throws PlaybackException { eos = false; setTimeshifting0(pHandle, timeshifted, bufferSize); currTimeshifted = timeshifted; long oldDur = getDurationMillis0(pHandle); switchLoadTVFile0(pHandle, file.getPath(), hostname, waitUntilDone); currHintMajorType = majorTypeHint; currHintMinorType = minorTypeHint; currHintEncoding = encodingHint; currFile = file; int lastState = currState; currState = LOADED_STATE; // Seek it back to the beginning, so we're loaded correctly. // seek(0); // Put the graph back into the same state as before if (lastState == PLAY_STATE) play(); else pause(); // Wait until the file is loaded and the duration is updated before we return or we'll // give an incorrect duration to the caller. 
int numWaits = 20; while (!timeshifted && numWaits >= 0 && getDurationMillis0(pHandle) == oldDur) { if (Sage.DBG && numWaits % 5 == 0) System.out.println("Waiting for duration to update from file change..."); try{Thread.sleep(50);}catch(Exception e){} numWaits--; } } public synchronized void load(byte majorTypeHint, byte minorTypeHint, String encodingHint, java.io.File file, String hostname, boolean timeshifted, long bufferSize) throws PlaybackException { eos = false; pHandle = createGraph0(); uiMgr = VideoFrame.getVideoFrameForPlayer(this).getUIMgr(); lastVolume = (uiMgr.getBoolean("media_player_uses_system_volume", Sage.WINDOWS_OS && !Sage.VISTA_OS)) ? 1.0f : uiMgr.getFloat("videoframe/last_dshow_volume", 1.0f); currHintMajorType = majorTypeHint; currHintMinorType = minorTypeHint; currHintEncoding = encodingHint; MediaFile currMF = VideoFrame.getMediaFileForPlayer(DShowTVPlayer.this); // Do this before we load the player so we don't screw up the driver if the mount fails due to network issues if (file.isFile() && currMF.isBluRay()) { // This is an ISO image instead of a DVD directory; so mount it and then change the file path to be the image java.io.File mountDir = FSManager.getInstance().requestISOMount(file, uiMgr); if (mountDir == null) { if (Sage.DBG) System.out.println("FAILED mounting ISO image for BluRay playback"); throw new PlaybackException(PlaybackException.FILESYSTEM, 0); } unmountRequired = mountDir; if (new java.io.File(mountDir, "bdmv").isDirectory()) file = new java.io.File(mountDir, "bdmv"); else if (new java.io.File(mountDir, "BDMV").isDirectory()) file = new java.io.File(mountDir, "BDMV"); else file = mountDir; // If we're doing an ISO mount; then we have local access and are not going through the server hostname = null; } String[] fileSequence = null; if (currMF.isBluRay()) { sage.media.bluray.BluRayParser mybdp = new sage.media.bluray.BluRayParser(file, hostname); try { mybdp.fullyAnalyze(); } catch (java.io.IOException ioe) { if (Sage.DBG) 
System.out.println("IO Error analyzing BluRay file structure of:" + ioe); throw new PlaybackException(PlaybackException.FILESYSTEM, 0); } bdp = mybdp; currBDTitle = uiMgr.getVideoFrame().getBluRayTargetTitle(); if (currBDTitle <= 0) currBDTitle = bdp.getMainPlaylistIndex() + 1; currBDTitle = Math.max(1, Math.min(currBDTitle, bdp.getNumPlaylists())); sage.media.bluray.MPLSObject currPlaylist = bdp.getPlaylist(currBDTitle - 1); fileSequence = new String[currPlaylist.playlistItems.length]; long[] ptsOffsets = new long[fileSequence.length]; java.io.File streamDir = new java.io.File(file, "STREAM"); long[] totalPts = new long[fileSequence.length]; for (int i = 0; i < fileSequence.length; i++) { fileSequence[i] = new java.io.File(streamDir, currPlaylist.playlistItems[i].itemClips[0].clipName + (bdp.doesUseShortFilenames() ? ".MTS" : ".m2ts")).getPath(); ptsOffsets[i] = (i == 0 ? 0 : totalPts[i - 1]) - currPlaylist.playlistItems[i].inTime; totalPts[i] = (i == 0 ? 0 : totalPts[i - 1]) + (currPlaylist.playlistItems[i].outTime - currPlaylist.playlistItems[i].inTime); } chapterOffsets = new long[currPlaylist.playlistMarks.length]; for (int i = 0; i < chapterOffsets.length; i++) { int itemRef = currPlaylist.playlistMarks[i].playItemIdRef; chapterOffsets[i] = (itemRef == 0 ? 
0 : totalPts[itemRef - 1]) + currPlaylist.playlistMarks[i].timestamp - currPlaylist.playlistItems[itemRef].inTime; } // If the last file in the sequence is smaller than 32K, then drop it because that causes issues in our demux if (fileSequence.length > 1 && new java.io.File(fileSequence[fileSequence.length - 1]).length() < 32768) { if (Sage.DBG) System.out.println("Removing last file from BluRay sequence due to its small size"); String[] newFS = new String[fileSequence.length - 1]; System.arraycopy(fileSequence, 0, newFS, 0, newFS.length); fileSequence = newFS; } if (sage.Sage.DBG) System.out.println("Established BluRay file sequence with " + fileSequence.length + " segments"); } // Set the default language index before we do the filters so we get the right audio codec selected setDefaultLangIndex(); String hwDecoder = Sage.get(prefs + PS_HW_DECODER_FILTER, ""); boolean disableSWDecoding = hwDecoder.length() > 0 && Sage.getBoolean(prefs + DISABLE_SW_DECODING, false); if (!disableSWDecoding) setFilters(); if (hwDecoder.length() > 0) { if (Sage.DBG) System.out.println("Setting MpegDeMux NumBuffers to " + Sage.getInt("pvr350_demux_numbuffers", 32) + " to avoid PVR350 lockups"); Sage.writeDwordValue(Sage.HKEY_LOCAL_MACHINE, "Software\\Frey Technologies\\Common\\DSFilters\\MpegDeMux", "NumBuffers", Sage.getInt("pvr350_demux_numbuffers", 32)); } setTimeshifting0(pHandle, timeshifted, bufferSize); currTimeshifted = timeshifted; if (currMF.isBluRay()) { setupGraphMultifile0(pHandle, fileSequence, hostname, !disableSWDecoding, !disableSWDecoding); } else { setupGraph0(pHandle, file != null ? 
file.getPath() : null, hostname, !disableSWDecoding && !"".equals(currMF.getPrimaryVideoFormat()), !disableSWDecoding); } if (transparency != TRANSLUCENT && (majorTypeHint == MediaFile.MEDIATYPE_VIDEO || majorTypeHint == MediaFile.MEDIATYPE_DVD || majorTypeHint == MediaFile.MEDIATYPE_BLURAY) && !disableSWDecoding) // not vmr & video is present, so we need to render to the HWND setVideoHWND0(pHandle, VideoFrame.getVideoFrameForPlayer(this).getVideoHandle()); colorKey = null; currCCState = -1; videoDimensions = null; getVideoDimensions(); if (hwDecoder.length() > 0) addHWDecoderFilter0(pHandle, hwDecoder, disableSWDecoding); currFile = file; currState = LOADED_STATE; getColorKey(); setNotificationWindow0(pHandle, Sage.mainHwnd); } public void free() { if (changedElecardRegistry) { Sage.writeDwordValue(Sage.HKEY_CURRENT_USER, "Software\\Elecard\\MPEG2 Video Decoder", "SoftwareBob", oldElecardRegistryValue); changedElecardRegistry = false; } super.free(); if (unmountRequired != null) { java.io.File removeMe = unmountRequired; unmountRequired = null; FSManager.getInstance().releaseISOMount(removeMe); } } protected void setFilters() throws PlaybackException { String vidDec = Sage.get(prefs + VIDEO_DECODER_FILTER, "SageTV MPEG Video Decoder"); java.util.Map renderOptions = new java.util.HashMap(); nullVideoDim = false; // See if the content is interlaced. If it's not we don't want to use DScaler. 
sage.media.format.ContainerFormat cf = getCurrFormat(); boolean isInterlaced = true; boolean audioOnly = cf != null && cf.getVideoFormat() == null; sage.media.format.VideoFormat vidForm = null; if (cf != null) { vidForm = cf.getVideoFormat(); isInterlaced = vidForm != null && vidForm.isInterlaced(); } if (!audioOnly && Sage.get("videoframe/video_postprocessing_filter", "").length() > 0) { setVideoPostProcessingFilter0(pHandle, Sage.get("videoframe/video_postprocessing_filter", ""), null); } else if (!audioOnly && isInterlaced && getUseDscaler() && (vidDec.toLowerCase().indexOf("elecard") != -1 || vidDec.toLowerCase().indexOf("mainconcept") != -1 || vidDec.toLowerCase().indexOf("sagetv") != -1 || !Sage.getBoolean("videoframe/require_compatible_decoder_for_dscaler", true))) { java.util.Map dscOptions = new java.util.HashMap(); int dscalerMode = getDscalerMode(); dscOptions.put(DSCALER_MODE, new Integer(dscalerMode)); dscOptions.put(DSCALER_ODD_FIELD_FIRST, Boolean.valueOf(getDscalerOddFieldFirst())); dscOptions.put(DSCALER_DOUBLE_REFRESH, Boolean.valueOf(getDscalerDoubleRefresh())); if (dscalerMode == DSCALER_MODE_PLUGIN) dscOptions.put(DSCALER_PLUGIN, getDscalerPlugin()); setVideoPostProcessingFilter0(pHandle, DSCALER_GUID, dscOptions); } else if (!audioOnly) { int dxvaMode = getDxvaMpegMode(); if (dxvaMode != 0) renderOptions.put(DXVA_MPEG_MODE, new Integer(dxvaMode)); int deinterlaceMode = getForceDeinterlace(); if (deinterlaceMode != 0) renderOptions.put(FORCE_DEINTERLACE, new Integer(deinterlaceMode)); } if (!audioOnly && (getUseEvr() || getUseVmr())) { if (DirectX9SageRenderer.getD3DObjectPtr() != 0 && DirectX9SageRenderer.getD3DDevicePtr() != 0 && (getUseVmr() || DirectX9SageRenderer.getD3DDeviceMgr() != 0)) { renderOptions.put("d3d_device_ptr", new Long(DirectX9SageRenderer.getD3DDevicePtr())); renderOptions.put("d3d_object_ptr", new Long(DirectX9SageRenderer.getD3DObjectPtr())); renderOptions.put("d3d_device_mgr", new 
Long(DirectX9SageRenderer.getD3DDeviceMgr())); // If the video height doesn't match that of a standard broadcast; then don't do the // CC filter insertion since that can mess up the video dimensions w/ VMR9/EVR int videoHeight = (vidForm != null) ? vidForm.getHeight() : 480; renderOptions.put("enable_cc", Boolean.valueOf(!Sage.getBoolean("videoframe/do_not_insert_directshow_cc_filter", false) && !VideoFrame.getMediaFileForPlayer(DShowTVPlayer.this).isBluRay() && (videoHeight == 480 || videoHeight == 576 || videoHeight == 720 || videoHeight == 1080 || videoHeight == 540))); setVideoRendererFilter0(pHandle, getUseEvr() ? EVR_GUID : VMR9_GUID, renderOptions); nullVideoDim = true; transparency = TRANSLUCENT; } else { setVideoRendererFilter0(pHandle, OVERLAY_GUID, renderOptions); transparency = BITMASK; } } else if (!audioOnly && getUseOverlay()) { setVideoRendererFilter0(pHandle, OVERLAY_GUID, renderOptions); transparency = BITMASK; } else { nullVideoDim = true; transparency = OPAQUE; } String audDec = Sage.get(prefs + AUDIO_RENDER_FILTER, "Default"); if (audDec.length() > 0 && !"Default".equals(audDec) && !Sage.rez("Default").equals(audDec)) setAudioRendererFilter0(pHandle, audDec, null); String audType = cf != null ? cf.getPrimaryAudioFormat() : ""; if (Sage.readDwordValue(Sage.HKEY_LOCAL_MACHINE, "Software\\Frey Technologies\\Common\\DSFilters\\MpegDeMux", "EnableHDAudio") == 0) { if (sage.media.format.MediaFormat.DTS_HD.equals(audType) || sage.media.format.MediaFormat.DTS_MA.equals(audType)) audType = sage.media.format.MediaFormat.DTS; else if (sage.media.format.MediaFormat.DOLBY_HD.equals(audType)) audType = sage.media.format.MediaFormat.AC3; } // Check for H.264 video and use that filter in that case String primaryVidForm = cf != null ? 
cf.getPrimaryVideoFormat() : ""; boolean isH264 = sage.media.format.MediaFormat.H264.equals(primaryVidForm); if (isH264) { setupH264DecoderFilter(); } // The video decoder filters are for MPEG2, not for DivX for MPEG4 so don't add them in that case else if (!audioOnly && vidDec.length() > 0 && !"Default".equals(vidDec) && !Sage.rez("Default").equals(vidDec) && sage.media.format.MediaFormat.MPEG2_VIDEO.equals(primaryVidForm)) { // If we're using an Elecard filter and DScaler then we need to specify to turn off // SW deinterlacing for Elecard if (getUseDscaler() && vidDec.toLowerCase().indexOf("elecard") != -1) { changedElecardRegistry = true; oldElecardRegistryValue = Sage.readDwordValue(Sage.HKEY_CURRENT_USER, "Software\\Elecard\\MPEG2 Video Decoder", "SoftwareBob"); Sage.writeDwordValue(Sage.HKEY_CURRENT_USER, "Software\\Elecard\\MPEG2 Video Decoder", "SoftwareBob", 0); } if (sage.media.format.MediaFormat.MP2.equals(audType)) { String altVidDec = Sage.get(prefs + VIDEO_DECODER_FILTER + "_alt", ""); if (altVidDec.length() > 0) vidDec = altVidDec; } setVideoDecoderFilter0(pHandle, vidDec, null); } else if (!audioOnly) { // Check for a format specific decoder if (primaryVidForm.length() > 0) vidDec = Sage.get(prefs + primaryVidForm.toLowerCase() + "_" + VIDEO_DECODER_FILTER, ""); // Default to the WindowsMedia VC1 decoder if another one isn't specified if (vidDec.length() == 0 && sage.media.format.MediaFormat.VC1.equals(primaryVidForm)) vidDec = "WMVideo Decoder DMO"; if (vidDec.length() > 0 && !"Default".equals(vidDec) && !Sage.rez("Default").equals(vidDec)) setVideoDecoderFilter0(pHandle, vidDec, null); } // Only use the selected audio decoder if it's MPEG1/AC3 audio audDec = ""; if (sage.media.format.MediaFormat.AC3.equals(audType) || sage.media.format.MediaFormat.MP2.equals(audType)) audDec = Sage.get(prefs + AUDIO_DECODER_FILTER, "SageTV MPEG Audio Decoder"); else { // Check for a format specific audio decoder if (audType.length() > 0) audDec = Sage.get(prefs + 
audType.toLowerCase() + "_" + AUDIO_DECODER_FILTER, ""); } if (audDec.length() > 0 && !"Default".equals(audDec) && !Sage.rez("Default").equals(audDec)) setAudioDecoderFilter0(pHandle, audDec, null); else if (sage.media.format.MediaFormat.DTS.equalsIgnoreCase(audType) || "DCA".equalsIgnoreCase(audType)) setAudioDecoderFilter0(pHandle, "AC3Filter", null); } public boolean playControlEx(int playCode, long param1, long param2) throws PlaybackException { if (super.playControlEx(playCode, param1, param2)) return true; if (currState == PLAY_STATE || currState == PAUSE_STATE || currState == LOADED_STATE) { if (bdp != null && playCode == VideoFrame.DVD_CONTROL_CHAPTER_NEXT) { if (getDVDChapter() < getDVDTotalChapters()) { long newTime = getChapterStartMsec(lastTargetChapter = (getDVDChapter() + 1)); if (Sage.DBG) System.out.println("Next chapter for BluRay seeking to " + newTime); seek(newTime); lastChapterSeekTime = Sage.eventTime(); } } else if (bdp != null && playCode == VideoFrame.DVD_CONTROL_CHAPTER_PREV) { int currChapter = getDVDChapter(); if (getMediaTimeMillis() - getChapterStartMsec(currChapter) > 7000 || currChapter == 1) { if (Sage.DBG) System.out.println("Prev chapter (restart curr chapter) for BluRay"); seek(getChapterStartMsec(currChapter)); } else { long newTime = getChapterStartMsec(lastTargetChapter = (currChapter - 1)); if (Sage.DBG) System.out.println("Prev chapter for BluRay seeking to " + newTime); seek(newTime); lastChapterSeekTime = Sage.eventTime(); } } else if (bdp != null && playCode == VideoFrame.DVD_CONTROL_CHAPTER_SET) { long newTime = getChapterStartMsec(lastTargetChapter = (int)param1); if (Sage.DBG) System.out.println("Set chapter (" + param1 + ") for BluRay seeking to " + newTime); seek(newTime); lastChapterSeekTime = Sage.eventTime(); } else if (bdp != null && playCode == VideoFrame.DVD_CONTROL_ANGLE_CHANGE) { /*if (getDVDTotalAngles() > 1) { currBDAngle++; if (currBDAngle > getDVDTotalAngles()) currBDAngle = 1; if (Sage.DBG) 
System.out.println("Setting BluRay Angle to be " + currBDAngle); // Lock the pusher so we can change the file source synchronized (this) { addYieldDecoderLock(); synchronized (decoderLock) { bdp.setAngle(currBDAngle); seek(getMediaTimeMillis()); } } }*/ } else if (bdp != null && playCode == VideoFrame.DVD_CONTROL_TITLE_SET) { if (param1 > 0 && param1 <= bdp.getNumPlaylists()) uiMgr.getVideoFrame().setBluRayTargetTitle((int)param1); else uiMgr.getVideoFrame().playbackControl(0); } } return false; } public int getDVDChapter() { if (bdp != null) return (Sage.eventTime() - lastChapterSeekTime < 1500) ? lastTargetChapter : getChapterForMsec(getMediaTimeMillis()); return 0; } public int getDVDTotalChapters() { if (bdp != null && chapterOffsets != null) return chapterOffsets.length; return 0; } public int getDVDDomain() { if (bdp != null) return 4; // We're always in the movie for BluRays return 0; } public int getDVDAngle() { return (bdp != null) ? 1 : 0; } public int getDVDTitle() { return (bdp != null) ? currBDTitle : 0; } public int getDVDTotalAngles() { return (bdp != null) ? 1 : 0; } public int getDVDTotalTitles() { return (bdp != null) ? bdp.getNumPlaylists() : 0; } public String getBluRayTitleDesc(int titleNum) { return (bdp != null) ? 
bdp.getPlaylistDesc(titleNum - 1) : ""; } private long getChapterStartMsec(int chapter) { if (chapterOffsets == null) return 0; return chapterOffsets[Math.max(0, Math.min(chapter - 1, chapterOffsets.length - 1))] / 45; } private int getChapterForMsec(long msec) { if (chapterOffsets == null) return 0; long pts45 = msec * 45; for (int i = 0; i < chapterOffsets.length; i++) if (chapterOffsets[i] > pts45) return i; return chapterOffsets.length; } protected sage.media.format.ContainerFormat getCurrFormat() { if (bdp != null) { return bdp.getFileFormat(currBDTitle - 1); } return super.getCurrFormat(); } protected native void switchLoadTVFile0(long ptr, String filePath, String hostname, boolean waitUntilDone) throws PlaybackException; // To handle frame stepping on the PVR350 TV Output protected native boolean frameStep0(long ptr, int amount); protected native void addHWDecoderFilter0(long ptr, String filterName, boolean hwDecodeOnly) throws PlaybackException; // createGraph0 will create the peer native object and create the initial filter graph protected native long createGraph0() throws PlaybackException; protected native void setupGraphMultifile0(long ptr, String[] filePaths, String remoteHost, boolean renderVideo, boolean renderAudio) throws PlaybackException; protected UIManager uiMgr; private int oldElecardRegistryValue; private boolean changedElecardRegistry; protected java.io.File unmountRequired; protected long[] chapterOffsets; // 45kHz protected sage.media.bluray.BluRayParser bdp; protected long lastChapterSeekTime; protected int lastTargetChapter; protected int currBDTitle; }
apache/tomee
37,374
container/openejb-core/src/main/java/org/apache/openejb/core/cmp/CmpContainer.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.openejb.core.cmp; import org.apache.openejb.ApplicationException; import org.apache.openejb.BeanContext; import org.apache.openejb.ContainerType; import org.apache.openejb.InterfaceType; import org.apache.openejb.OpenEJBException; import org.apache.openejb.ProxyInfo; import org.apache.openejb.RpcContainer; import org.apache.openejb.core.ExceptionType; import org.apache.openejb.core.Operation; import org.apache.openejb.core.ThreadContext; import org.apache.openejb.core.entity.EntityContext; import org.apache.openejb.core.entity.EntrancyTracker; import org.apache.openejb.core.timer.EjbTimerService; import org.apache.openejb.core.timer.EjbTimerServiceImpl; import org.apache.openejb.core.transaction.TransactionPolicy; import org.apache.openejb.loader.SystemInstance; import org.apache.openejb.spi.SecurityService; import org.apache.openejb.util.Enumerator; import jakarta.ejb.EJBAccessException; import jakarta.ejb.EJBContext; import jakarta.ejb.EJBException; import jakarta.ejb.EJBHome; import jakarta.ejb.EJBLocalHome; import jakarta.ejb.EJBLocalObject; import jakarta.ejb.EJBObject; import jakarta.ejb.EntityBean; import jakarta.ejb.FinderException; import 
jakarta.ejb.ObjectNotFoundException; import jakarta.ejb.RemoveException; import jakarta.ejb.Timer; import jakarta.transaction.Synchronization; import jakarta.transaction.TransactionManager; import jakarta.transaction.TransactionSynchronizationRegistry; import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.rmi.NoSuchObjectException; import java.rmi.RemoteException; import java.util.ArrayList; import java.util.Collection; import java.util.Enumeration; import java.util.HashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import static org.apache.openejb.core.transaction.EjbTransactionUtil.afterInvoke; import static org.apache.openejb.core.transaction.EjbTransactionUtil.createTransactionPolicy; import static org.apache.openejb.core.transaction.EjbTransactionUtil.handleApplicationException; import static org.apache.openejb.core.transaction.EjbTransactionUtil.handleSystemException; /** * @org.apache.xbean.XBean element="cmpContainer" */ public class CmpContainer implements RpcContainer { protected final Object containerID; protected final SecurityService securityService; /** * Index used for getDeployments() and getDeploymentInfo(deploymentId). */ protected final Map<Object, BeanContext> deploymentsById = new HashMap<>(); /** * When events are fired from the CMP engine only an entity bean instance is returned. The type of the bean is used * to find the deployment info. This means that when the same type is used multiple ejb deployments a random deployment * will be selected to handle the ejb callback. */ protected final Map<Class, BeanContext> beansByClass = new HashMap<>(); /** * The CmpEngine which performs the actual persistence operations */ protected final CmpEngine cmpEngine; /** * Tracks entity instances that have been "entered" so we can throw reentrancy exceptions. 
*/ protected EntrancyTracker entrancyTracker; protected TransactionSynchronizationRegistry synchronizationRegistry; private static final Object ENTITIES_TO_STORE = new Object() { public String toString() { return "EntitiesToStore"; } }; public CmpContainer(final Object id, final TransactionManager transactionManager, final SecurityService securityService, final String cmpEngineFactory) throws OpenEJBException { this.containerID = id; this.securityService = securityService; synchronizationRegistry = SystemInstance.get().getComponent(TransactionSynchronizationRegistry.class); entrancyTracker = new EntrancyTracker(synchronizationRegistry); // create the cmp engine instance ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); if (classLoader == null) { classLoader = getClass().getClassLoader(); } final CmpEngineFactory factory; try { final Class<?> cmpEngineFactoryClass = classLoader.loadClass(cmpEngineFactory); factory = (CmpEngineFactory) cmpEngineFactoryClass.newInstance(); } catch (final Exception e) { throw new OpenEJBException("Unable to create cmp engine factory " + cmpEngineFactory, e); } factory.setTransactionManager(transactionManager); factory.setTransactionSynchronizationRegistry(synchronizationRegistry); factory.setCmpCallback(new ContainerCmpCallback()); cmpEngine = factory.create(); } @Override public Object getContainerID() { return containerID; } @Override public ContainerType getContainerType() { return ContainerType.CMP_ENTITY; } @Override public synchronized BeanContext[] getBeanContexts() { return deploymentsById.values().toArray(new BeanContext[deploymentsById.size()]); } @Override public synchronized BeanContext getBeanContext(final Object deploymentID) { return deploymentsById.get(deploymentID); } private BeanContext getBeanContextByClass(Class type) { BeanContext beanContext = null; while (type != null && beanContext == null) { beanContext = beansByClass.get(type); type = type.getSuperclass(); } return beanContext; } 
@Override public void deploy(final BeanContext beanContext) throws OpenEJBException { synchronized (this) { final Object deploymentId = beanContext.getDeploymentID(); cmpEngine.deploy(beanContext); beanContext.setContainerData(cmpEngine); beanContext.set(EJBContext.class, new EntityContext(securityService)); // try to set deploymentInfo static field on bean implementation class try { final Field field = beanContext.getCmpImplClass().getField("deploymentInfo"); field.set(null, beanContext); } catch (final Exception e) { // ignore } // add to indexes deploymentsById.put(deploymentId, beanContext); beansByClass.put(beanContext.getCmpImplClass(), beanContext); beanContext.setContainer(this); } } @Override public void start(final BeanContext beanContext) throws OpenEJBException { final EjbTimerService timerService = beanContext.getEjbTimerService(); if (timerService != null) { timerService.start(); } } @Override public void stop(final BeanContext beanContext) throws OpenEJBException { beanContext.stop(); } @Override public void undeploy(final BeanContext beanContext) throws OpenEJBException { synchronized (this) { deploymentsById.remove(beanContext.getDeploymentID()); beansByClass.remove(beanContext.getCmpImplClass()); try { final Field field = beanContext.getCmpImplClass().getField("deploymentInfo"); field.set(null, null); } catch (final Exception e) { // ignore } beanContext.setContainer(null); beanContext.setContainerData(null); } } public Object getEjbInstance(final BeanContext beanContext, final Object primaryKey) { final ThreadContext callContext = new ThreadContext(beanContext, primaryKey); final ThreadContext oldCallContext = ThreadContext.enter(callContext); try { return cmpEngine.loadBean(callContext, primaryKey); } finally { ThreadContext.exit(oldCallContext); } } @Override public Object invoke(final Object deployID, InterfaceType type, final Class callInterface, final Method callMethod, final Object[] args, final Object primKey) throws OpenEJBException { 
final BeanContext beanContext = this.getBeanContext(deployID); if (beanContext == null) { throw new OpenEJBException("Deployment does not exist in this container. Deployment(id='" + deployID + "'), Container(id='" + containerID + "')"); } // Use the backup way to determine call type if null was supplied. if (type == null) { type = beanContext.getInterfaceType(callInterface); } final ThreadContext callContext = new ThreadContext(beanContext, primKey); final ThreadContext oldCallContext = ThreadContext.enter(callContext); try { final boolean authorized = securityService.isCallerAuthorized(callMethod, type); if (!authorized) { throw new ApplicationException(new EJBAccessException("Unauthorized Access by Principal Denied")); } final Class declaringClass = callMethod.getDeclaringClass(); final String methodName = callMethod.getName(); if (EJBHome.class.isAssignableFrom(declaringClass) || EJBLocalHome.class.isAssignableFrom(declaringClass)) { if (declaringClass != EJBHome.class && declaringClass != EJBLocalHome.class) { if (methodName.startsWith("create")) { return createEJBObject(callMethod, args, callContext, type); } else if (methodName.equals("findByPrimaryKey")) { return findByPrimaryKey(callMethod, args, callContext, type); } else if (methodName.startsWith("find")) { return findEJBObject(callMethod, args, callContext, type); } else { return homeMethod(callMethod, args, callContext, type); } } else if (methodName.equals("remove")) { removeEJBObject(callMethod, callContext, type); return null; } } else if ((EJBObject.class == declaringClass || EJBLocalObject.class == declaringClass) && methodName.equals("remove")) { removeEJBObject(callMethod, callContext, type); return null; } // business method callContext.setCurrentOperation(Operation.BUSINESS); final Method runMethod = beanContext.getMatchingBeanMethod(callMethod); callContext.set(Method.class, runMethod); return businessMethod(callMethod, runMethod, args, callContext, type); } finally { 
ThreadContext.exit(oldCallContext); } } private EntityBean createNewInstance(final ThreadContext callContext) { final BeanContext beanContext = callContext.getBeanContext(); try { return (EntityBean) beanContext.getCmpImplClass().newInstance(); } catch (final Exception e) { throw new EJBException("Unable to create new entity bean instance " + beanContext.getCmpImplClass(), e); } } private ThreadContext createThreadContext(final EntityBean entityBean) { if (entityBean == null) { throw new NullPointerException("entityBean is null"); } final BeanContext beanContext = getBeanContextByClass(entityBean.getClass()); final KeyGenerator keyGenerator = beanContext.getKeyGenerator(); final Object primaryKey = keyGenerator.getPrimaryKey(entityBean); return new ThreadContext(beanContext, primaryKey); } private void setEntityContext(final EntityBean entityBean) { if (entityBean == null) { throw new NullPointerException("entityBean is null"); } // activating entity doen't have a primary key final BeanContext beanContext = getBeanContextByClass(entityBean.getClass()); final ThreadContext callContext = new ThreadContext(beanContext, null); callContext.setCurrentOperation(Operation.SET_CONTEXT); final ThreadContext oldCallContext = ThreadContext.enter(callContext); try { entityBean.setEntityContext(new EntityContext(securityService)); } catch (final RemoteException e) { throw new EJBException(e); } finally { ThreadContext.exit(oldCallContext); } } private void unsetEntityContext(final EntityBean entityBean) { if (entityBean == null) { throw new NullPointerException("entityBean is null"); } final ThreadContext callContext = createThreadContext(entityBean); callContext.setCurrentOperation(Operation.UNSET_CONTEXT); final ThreadContext oldCallContext = ThreadContext.enter(callContext); try { entityBean.unsetEntityContext(); } catch (final RemoteException e) { throw new EJBException(e); } finally { ThreadContext.exit(oldCallContext); } } private void ejbLoad(final EntityBean entityBean) 
{ if (entityBean == null) { throw new NullPointerException("entityBean is null"); } final ThreadContext callContext = createThreadContext(entityBean); callContext.setCurrentOperation(Operation.LOAD); final ThreadContext oldCallContext = ThreadContext.enter(callContext); try { entityBean.ejbLoad(); } catch (final RemoteException e) { throw new EJBException(e); } finally { ThreadContext.exit(oldCallContext); } // if we call load we must call store try { //noinspection unchecked Set<EntityBean> registeredEntities = (LinkedHashSet<EntityBean>) synchronizationRegistry.getResource(ENTITIES_TO_STORE); if (registeredEntities == null) { registeredEntities = new LinkedHashSet<>(); synchronizationRegistry.putResource(ENTITIES_TO_STORE, registeredEntities); synchronizationRegistry.registerInterposedSynchronization(new Synchronization() { @Override public void beforeCompletion() { //noinspection unchecked final Set<EntityBean> registeredEntities = (LinkedHashSet<EntityBean>) synchronizationRegistry.getResource(ENTITIES_TO_STORE); if (registeredEntities == null) { return; } for (final EntityBean entityBean : registeredEntities) { ejbStore(entityBean); } } @Override public void afterCompletion(final int i) { } }); } registeredEntities.add(entityBean); } catch (final Exception e) { // no-op } } private void ejbStore(final EntityBean entityBean) { if (entityBean == null) { throw new NullPointerException("entityBean is null"); } final ThreadContext callContext = createThreadContext(entityBean); callContext.setCurrentOperation(Operation.STORE); final ThreadContext oldCallContext = ThreadContext.enter(callContext); try { entityBean.ejbStore(); } catch (final RemoteException e) { throw new EJBException(e); } finally { ThreadContext.exit(oldCallContext); } } private void ejbRemove(final EntityBean entityBean) throws RemoveException { if (entityBean == null) { throw new NullPointerException("entityBean is null"); } if (isDeleted(entityBean)) { return; } final ThreadContext callContext = 
createThreadContext(entityBean); callContext.setCurrentOperation(Operation.REMOVE); final ThreadContext oldCallContext = ThreadContext.enter(callContext); try { entityBean.ejbRemove(); } catch (final RemoteException e) { throw new EJBException(e); } finally { // clear relationships // todo replace with interface call when CmpEntityBean interface is added try { entityBean.getClass().getMethod("OpenEJB_deleted").invoke(entityBean); } catch (final Exception ignored) { // no-op } cancelTimers(callContext); ThreadContext.exit(oldCallContext); } } private boolean isDeleted(final EntityBean entityBean) { try { return (Boolean) entityBean.getClass().getMethod("OpenEJB_isDeleted").invoke(entityBean); } catch (final NoSuchMethodException e) { return false; } catch (final Exception e) { throw new EJBException(e); } } private void ejbActivate(final EntityBean entityBean) { if (entityBean == null) { throw new NullPointerException("entityBean is null"); } final ThreadContext callContext = createThreadContext(entityBean); callContext.setCurrentOperation(Operation.ACTIVATE); final ThreadContext oldCallContext = ThreadContext.enter(callContext); try { entityBean.ejbActivate(); } catch (final RemoteException e) { throw new EJBException(e); } finally { ThreadContext.exit(oldCallContext); } } private void ejbPassivate(final EntityBean entityBean) { if (entityBean == null) { throw new NullPointerException("entityBean is null"); } final ThreadContext callContext = createThreadContext(entityBean); callContext.setCurrentOperation(Operation.PASSIVATE); final ThreadContext oldCallContext = ThreadContext.enter(callContext); try { entityBean.ejbPassivate(); } catch (final RemoteException e) { throw new EJBException(e); } finally { ThreadContext.exit(oldCallContext); } } private Object businessMethod(final Method callMethod, final Method runMethod, final Object[] args, final ThreadContext callContext, final InterfaceType interfaceType) throws OpenEJBException { final BeanContext beanContext = 
callContext.getBeanContext(); final TransactionPolicy txPolicy = createTransactionPolicy(beanContext.getTransactionType(callMethod, interfaceType), callContext); final EntityBean bean; Object returnValue = null; entrancyTracker.enter(beanContext, callContext.getPrimaryKey()); try { bean = (EntityBean) cmpEngine.loadBean(callContext, callContext.getPrimaryKey()); if (bean == null) { throw new NoSuchObjectException(beanContext.getDeploymentID() + " : " + callContext.getPrimaryKey()); } returnValue = runMethod.invoke(bean, args); // when there is not transaction, merge the data from the bean back into the cmp engine cmpEngine.storeBeanIfNoTx(callContext, bean); } catch (final NoSuchObjectException e) { handleApplicationException(txPolicy, e, false); } catch (Throwable e) { if (e instanceof InvocationTargetException) { e = ((InvocationTargetException) e).getTargetException(); } final ExceptionType type = callContext.getBeanContext().getExceptionType(e); if (type == ExceptionType.SYSTEM) { /* System Exception ****************************/ handleSystemException(txPolicy, e, callContext); } else { /* Application Exception ***********************/ handleApplicationException(txPolicy, e, type == ExceptionType.APPLICATION_ROLLBACK); } } finally { entrancyTracker.exit(beanContext, callContext.getPrimaryKey()); afterInvoke(txPolicy, callContext); } return returnValue; } private Object homeMethod(final Method callMethod, final Object[] args, final ThreadContext callContext, final InterfaceType interfaceType) throws OpenEJBException { final BeanContext beanContext = callContext.getBeanContext(); final TransactionPolicy txPolicy = createTransactionPolicy(beanContext.getTransactionType(callMethod, interfaceType), callContext); final EntityBean bean; Object returnValue = null; try { /* Obtain a bean instance from the method ready pool */ bean = createNewInstance(callContext); // set the entity context setEntityContext(bean); try { callContext.setCurrentOperation(Operation.HOME); 
final Method runMethod = beanContext.getMatchingBeanMethod(callMethod); try { returnValue = runMethod.invoke(bean, args); } catch (final IllegalArgumentException e) { System.out.println("********************************************************"); System.out.println("callMethod = " + callMethod); System.out.println("runMethod = " + runMethod); System.out.println("bean = " + bean.getClass().getName()); throw e; } } finally { unsetEntityContext(bean); } } catch (Throwable e) { if (e instanceof InvocationTargetException) { e = ((InvocationTargetException) e).getTargetException(); } final ExceptionType type = callContext.getBeanContext().getExceptionType(e); if (type == ExceptionType.SYSTEM) { /* System Exception ****************************/ handleSystemException(txPolicy, e, callContext); } else { /* Application Exception ***********************/ handleApplicationException(txPolicy, e, type == ExceptionType.APPLICATION_ROLLBACK); } } finally { afterInvoke(txPolicy, callContext); } return returnValue; } private ProxyInfo createEJBObject(final Method callMethod, final Object[] args, final ThreadContext callContext, final InterfaceType interfaceType) throws OpenEJBException { final BeanContext beanContext = callContext.getBeanContext(); final TransactionPolicy txPolicy = createTransactionPolicy(beanContext.getTransactionType(callMethod, interfaceType), callContext); final EntityBean bean; Object primaryKey = null; try { // Obtain a bean instance from the method ready pool bean = createNewInstance(callContext); // set the entity context setEntityContext(bean); // Obtain the proper ejbCreate() method final Method ejbCreateMethod = beanContext.getMatchingBeanMethod(callMethod); // Set current operation for allowed operations callContext.setCurrentOperation(Operation.CREATE); // Invoke the proper ejbCreate() method on the instance ejbCreateMethod.invoke(bean, args); // create the new bean primaryKey = cmpEngine.createBean(bean, callContext); // determine post create callback 
method final Method ejbPostCreateMethod = beanContext.getMatchingPostCreateMethod(ejbCreateMethod); // create a new context containing the pk for the post create call final ThreadContext postCreateContext = new ThreadContext(beanContext, primaryKey); postCreateContext.setCurrentOperation(Operation.POST_CREATE); final ThreadContext oldContext = ThreadContext.enter(postCreateContext); try { // Invoke the ejbPostCreate method on the bean instance ejbPostCreateMethod.invoke(bean, args); // According to section 9.1.5.1 of the EJB 1.1 specification, the "ejbPostCreate(...) // method executes in the same transaction context as the previous ejbCreate(...) method." // // The bean is first insterted using db.create( ) and then after ejbPostCreate( ) its // updated using db.update(). This protocol allows for visablity of the bean after ejbCreate // within the current trasnaction. } finally { ThreadContext.exit(oldContext); } // when there is not transaction, merge the data from the bean back into the cmp engine cmpEngine.storeBeanIfNoTx(callContext, bean); } catch (Throwable e) { if (e instanceof InvocationTargetException) { e = ((InvocationTargetException) e).getTargetException(); } final ExceptionType type = callContext.getBeanContext().getExceptionType(e); if (type == ExceptionType.SYSTEM) { /* System Exception ****************************/ handleSystemException(txPolicy, e, callContext); } else { /* Application Exception ***********************/ handleApplicationException(txPolicy, e, type == ExceptionType.APPLICATION_ROLLBACK); } } finally { afterInvoke(txPolicy, callContext); } return new ProxyInfo(beanContext, primaryKey); } private Object findByPrimaryKey(final Method callMethod, final Object[] args, final ThreadContext callContext, final InterfaceType interfaceType) throws OpenEJBException { final BeanContext beanContext = callContext.getBeanContext(); final TransactionPolicy txPolicy = createTransactionPolicy(beanContext.getTransactionType(callMethod, 
interfaceType), callContext); try { final EntityBean bean = (EntityBean) cmpEngine.loadBean(callContext, args[0]); if (bean == null) { throw new ObjectNotFoundException(beanContext.getDeploymentID() + " : " + args[0]); } // rebuild the primary key final KeyGenerator kg = beanContext.getKeyGenerator(); final Object primaryKey = kg.getPrimaryKey(bean); // create a new ProxyInfo based on the deployment info and primary key return new ProxyInfo(beanContext, primaryKey); } catch (final FinderException fe) { handleApplicationException(txPolicy, fe, false); } catch (final Throwable e) {// handle reflection exception handleSystemException(txPolicy, e, callContext); } finally { afterInvoke(txPolicy, callContext); } throw new AssertionError("Should not get here"); } private Object findEJBObject(final Method callMethod, final Object[] args, final ThreadContext callContext, final InterfaceType interfaceType) throws OpenEJBException { final BeanContext beanContext = callContext.getBeanContext(); final TransactionPolicy txPolicy = createTransactionPolicy(beanContext.getTransactionType(callMethod, interfaceType), callContext); try { final List<Object> results = cmpEngine.queryBeans(callContext, callMethod, args); final KeyGenerator kg = beanContext.getKeyGenerator(); // The following block of code is responsible for returning ProxyInfo object(s) for each // matching entity bean found by the query. If its a multi-value find operation a Vector // of ProxyInfo objects will be returned. If its a single-value find operation then a // single ProxyInfo object is returned. 
if (callMethod.getReturnType() == Collection.class || callMethod.getReturnType() == Enumeration.class) { final List<ProxyInfo> proxies = new ArrayList<>(); for (final Object value : results) { final EntityBean bean = (EntityBean) value; if (value == null) { proxies.add(null); } else { // get the primary key final Object primaryKey = kg.getPrimaryKey(bean); // create a new ProxyInfo based on the deployment info and primary key and add it to the vector proxies.add(new ProxyInfo(beanContext, primaryKey)); } } if (callMethod.getReturnType() == Enumeration.class) { return new Enumerator(proxies); } else { return proxies; } } else { if (results.size() != 1) { throw new ObjectNotFoundException("A Enteprise bean with deployment_id = " + beanContext.getDeploymentID() + (args != null && args.length >= 1 ? " and primarykey = " + args[0] : "") + " Does not exist"); } // create a new ProxyInfo based on the deployment info and primary key final EntityBean bean = (EntityBean) results.get(0); if (bean == null) { return null; } else { final Object primaryKey = kg.getPrimaryKey(bean); return new ProxyInfo(beanContext, primaryKey); } } } catch (final FinderException fe) { handleApplicationException(txPolicy, fe, false); } catch (final Throwable e) {// handle reflection exception handleSystemException(txPolicy, e, callContext); } finally { afterInvoke(txPolicy, callContext); } throw new AssertionError("Should not get here"); } public Object select(final BeanContext beanContext, final String methodSignature, final String returnType, final Object... args) throws FinderException { final String signature = beanContext.getAbstractSchemaName() + "." + methodSignature; try { // execute the select query final Collection<Object> results = cmpEngine.queryBeans(beanContext, signature, args); // // process the results // // If we need to return a set... 
final Collection<Object> proxies; if (returnType.equals("java.util.Set")) { // we collect values into a LinkedHashSet to preserve ordering proxies = new LinkedHashSet<>(); } else { // otherwise use a simple array list proxies = new ArrayList<>(); } final boolean isSingleValued = !returnType.equals("java.util.Collection") && !returnType.equals("java.util.Set"); ProxyFactory proxyFactory = null; for (Object value : results) { // if this is a single valued query and we already have results, throw FinderException if (isSingleValued && !proxies.isEmpty()) { throw new FinderException("The single valued query " + methodSignature + "returned more than one item"); } // if we have an EntityBean, we need to proxy it if (value instanceof EntityBean) { final EntityBean entityBean = (EntityBean) value; if (proxyFactory == null) { final BeanContext result = getBeanContextByClass(entityBean.getClass()); if (result != null) { proxyFactory = new ProxyFactory(result); } } if (proxyFactory != null) { if (beanContext.isRemoteQueryResults(methodSignature)) { value = proxyFactory.createRemoteProxy(entityBean, this); } else { value = proxyFactory.createLocalProxy(entityBean, this); } } } proxies.add(value); } // if not single valued, return the set if (!isSingleValued) { return proxies; } // single valued query that returned no rows, is an exception if (proxies.isEmpty()) { throw new ObjectNotFoundException(); } // return the single item.... multiple return values was handled in for loop above return proxies.iterator().next(); } catch (final RuntimeException e) { throw new EJBException(e); } } public int update(final BeanContext beanContext, final String methodSignature, final Object... args) throws FinderException { final String signature = beanContext.getAbstractSchemaName() + "." 
+ methodSignature; // exectue the update query return cmpEngine.executeUpdateQuery(beanContext, signature, args); } private void removeEJBObject(final Method callMethod, final ThreadContext callContext, final InterfaceType interfaceType) throws OpenEJBException { final BeanContext beanContext = callContext.getBeanContext(); final TransactionPolicy txPolicy = createTransactionPolicy(beanContext.getTransactionType(callMethod, interfaceType), callContext); try { final EntityBean entityBean = (EntityBean) cmpEngine.loadBean(callContext, callContext.getPrimaryKey()); if (entityBean == null) { throw new NoSuchObjectException(callContext.getBeanContext().getDeploymentID() + " " + callContext.getPrimaryKey()); } ejbRemove(entityBean); cmpEngine.removeBean(callContext); } catch (final NoSuchObjectException e) { handleApplicationException(txPolicy, e, false); } catch (final Throwable e) {// handle reflection exception handleSystemException(txPolicy, e, callContext); } finally { afterInvoke(txPolicy, callContext); } } private void cancelTimers(final ThreadContext threadContext) { final BeanContext beanContext = threadContext.getBeanContext(); final Object primaryKey = threadContext.getPrimaryKey(); // stop timers if (primaryKey != null && beanContext.getEjbTimerService() != null) { final EjbTimerService timerService = beanContext.getEjbTimerService(); if (timerService != null && timerService instanceof EjbTimerServiceImpl) { for (final Timer timer : beanContext.getEjbTimerService().getTimers(primaryKey)) { timer.cancel(); } } } } private class ContainerCmpCallback implements CmpCallback { @Override public void setEntityContext(final EntityBean entity) { CmpContainer.this.setEntityContext(entity); } @Override public void unsetEntityContext(final EntityBean entity) { CmpContainer.this.unsetEntityContext(entity); } @Override public void ejbActivate(final EntityBean entity) { CmpContainer.this.ejbActivate(entity); } @Override public void ejbPassivate(final EntityBean entity) { 
CmpContainer.this.ejbPassivate(entity); } @Override public void ejbLoad(final EntityBean entity) { CmpContainer.this.ejbLoad(entity); } @Override public void ejbStore(final EntityBean entity) { CmpContainer.this.ejbStore(entity); } @Override public void ejbRemove(final EntityBean entity) throws RemoveException { CmpContainer.this.ejbRemove(entity); } } }
googleapis/google-cloud-java
37,090
java-cloudbuild/proto-google-cloud-build-v1/src/main/java/com/google/cloudbuild/v1/UploadedNpmPackage.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/devtools/cloudbuild/v1/cloudbuild.proto // Protobuf Java Version: 3.25.8 package com.google.cloudbuild.v1; /** * * * <pre> * An npm package uploaded to Artifact Registry using the NpmPackage * directive. * </pre> * * Protobuf type {@code google.devtools.cloudbuild.v1.UploadedNpmPackage} */ public final class UploadedNpmPackage extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.devtools.cloudbuild.v1.UploadedNpmPackage) UploadedNpmPackageOrBuilder { private static final long serialVersionUID = 0L; // Use UploadedNpmPackage.newBuilder() to construct. 
private UploadedNpmPackage(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UploadedNpmPackage() { uri_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UploadedNpmPackage(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloudbuild.v1.Cloudbuild .internal_static_google_devtools_cloudbuild_v1_UploadedNpmPackage_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloudbuild.v1.Cloudbuild .internal_static_google_devtools_cloudbuild_v1_UploadedNpmPackage_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloudbuild.v1.UploadedNpmPackage.class, com.google.cloudbuild.v1.UploadedNpmPackage.Builder.class); } private int bitField0_; public static final int URI_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object uri_ = ""; /** * * * <pre> * URI of the uploaded npm package. * </pre> * * <code>string uri = 1;</code> * * @return The uri. */ @java.lang.Override public java.lang.String getUri() { java.lang.Object ref = uri_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); uri_ = s; return s; } } /** * * * <pre> * URI of the uploaded npm package. * </pre> * * <code>string uri = 1;</code> * * @return The bytes for uri. 
*/ @java.lang.Override public com.google.protobuf.ByteString getUriBytes() { java.lang.Object ref = uri_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); uri_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FILE_HASHES_FIELD_NUMBER = 2; private com.google.cloudbuild.v1.FileHashes fileHashes_; /** * * * <pre> * Hash types and values of the npm package. * </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> * * @return Whether the fileHashes field is set. */ @java.lang.Override public boolean hasFileHashes() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Hash types and values of the npm package. * </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> * * @return The fileHashes. */ @java.lang.Override public com.google.cloudbuild.v1.FileHashes getFileHashes() { return fileHashes_ == null ? com.google.cloudbuild.v1.FileHashes.getDefaultInstance() : fileHashes_; } /** * * * <pre> * Hash types and values of the npm package. * </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> */ @java.lang.Override public com.google.cloudbuild.v1.FileHashesOrBuilder getFileHashesOrBuilder() { return fileHashes_ == null ? com.google.cloudbuild.v1.FileHashes.getDefaultInstance() : fileHashes_; } public static final int PUSH_TIMING_FIELD_NUMBER = 3; private com.google.cloudbuild.v1.TimeSpan pushTiming_; /** * * * <pre> * Output only. Stores timing information for pushing the specified artifact. * </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the pushTiming field is set. */ @java.lang.Override public boolean hasPushTiming() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Output only. 
Stores timing information for pushing the specified artifact. * </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The pushTiming. */ @java.lang.Override public com.google.cloudbuild.v1.TimeSpan getPushTiming() { return pushTiming_ == null ? com.google.cloudbuild.v1.TimeSpan.getDefaultInstance() : pushTiming_; } /** * * * <pre> * Output only. Stores timing information for pushing the specified artifact. * </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ @java.lang.Override public com.google.cloudbuild.v1.TimeSpanOrBuilder getPushTimingOrBuilder() { return pushTiming_ == null ? com.google.cloudbuild.v1.TimeSpan.getDefaultInstance() : pushTiming_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(uri_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, uri_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getFileHashes()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(3, getPushTiming()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(uri_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, uri_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getFileHashes()); } if (((bitField0_ & 0x00000002) != 0)) { size += 
com.google.protobuf.CodedOutputStream.computeMessageSize(3, getPushTiming()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloudbuild.v1.UploadedNpmPackage)) { return super.equals(obj); } com.google.cloudbuild.v1.UploadedNpmPackage other = (com.google.cloudbuild.v1.UploadedNpmPackage) obj; if (!getUri().equals(other.getUri())) return false; if (hasFileHashes() != other.hasFileHashes()) return false; if (hasFileHashes()) { if (!getFileHashes().equals(other.getFileHashes())) return false; } if (hasPushTiming() != other.hasPushTiming()) return false; if (hasPushTiming()) { if (!getPushTiming().equals(other.getPushTiming())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + URI_FIELD_NUMBER; hash = (53 * hash) + getUri().hashCode(); if (hasFileHashes()) { hash = (37 * hash) + FILE_HASHES_FIELD_NUMBER; hash = (53 * hash) + getFileHashes().hashCode(); } if (hasPushTiming()) { hash = (37 * hash) + PUSH_TIMING_FIELD_NUMBER; hash = (53 * hash) + getPushTiming().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloudbuild.v1.UploadedNpmPackage parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloudbuild.v1.UploadedNpmPackage parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloudbuild.v1.UploadedNpmPackage parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloudbuild.v1.UploadedNpmPackage parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloudbuild.v1.UploadedNpmPackage parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloudbuild.v1.UploadedNpmPackage parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloudbuild.v1.UploadedNpmPackage parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloudbuild.v1.UploadedNpmPackage parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloudbuild.v1.UploadedNpmPackage parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloudbuild.v1.UploadedNpmPackage parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloudbuild.v1.UploadedNpmPackage parseFrom( 
com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloudbuild.v1.UploadedNpmPackage parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloudbuild.v1.UploadedNpmPackage prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * An npm package uploaded to Artifact Registry using the NpmPackage * directive. 
* </pre> * * Protobuf type {@code google.devtools.cloudbuild.v1.UploadedNpmPackage} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.devtools.cloudbuild.v1.UploadedNpmPackage) com.google.cloudbuild.v1.UploadedNpmPackageOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloudbuild.v1.Cloudbuild .internal_static_google_devtools_cloudbuild_v1_UploadedNpmPackage_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloudbuild.v1.Cloudbuild .internal_static_google_devtools_cloudbuild_v1_UploadedNpmPackage_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloudbuild.v1.UploadedNpmPackage.class, com.google.cloudbuild.v1.UploadedNpmPackage.Builder.class); } // Construct using com.google.cloudbuild.v1.UploadedNpmPackage.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getFileHashesFieldBuilder(); getPushTimingFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; uri_ = ""; fileHashes_ = null; if (fileHashesBuilder_ != null) { fileHashesBuilder_.dispose(); fileHashesBuilder_ = null; } pushTiming_ = null; if (pushTimingBuilder_ != null) { pushTimingBuilder_.dispose(); pushTimingBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloudbuild.v1.Cloudbuild .internal_static_google_devtools_cloudbuild_v1_UploadedNpmPackage_descriptor; } @java.lang.Override public 
com.google.cloudbuild.v1.UploadedNpmPackage getDefaultInstanceForType() { return com.google.cloudbuild.v1.UploadedNpmPackage.getDefaultInstance(); } @java.lang.Override public com.google.cloudbuild.v1.UploadedNpmPackage build() { com.google.cloudbuild.v1.UploadedNpmPackage result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloudbuild.v1.UploadedNpmPackage buildPartial() { com.google.cloudbuild.v1.UploadedNpmPackage result = new com.google.cloudbuild.v1.UploadedNpmPackage(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloudbuild.v1.UploadedNpmPackage result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.uri_ = uri_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.fileHashes_ = fileHashesBuilder_ == null ? fileHashes_ : fileHashesBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pushTiming_ = pushTimingBuilder_ == null ? 
pushTiming_ : pushTimingBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloudbuild.v1.UploadedNpmPackage) { return mergeFrom((com.google.cloudbuild.v1.UploadedNpmPackage) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloudbuild.v1.UploadedNpmPackage other) { if (other == com.google.cloudbuild.v1.UploadedNpmPackage.getDefaultInstance()) return this; if (!other.getUri().isEmpty()) { uri_ = other.uri_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasFileHashes()) { mergeFileHashes(other.getFileHashes()); } if (other.hasPushTiming()) { mergePushTiming(other.getPushTiming()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { uri_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getFileHashesFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 26: { input.readMessage(getPushTimingFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object uri_ = ""; /** * * * <pre> * URI of the uploaded npm package. * </pre> * * <code>string uri = 1;</code> * * @return The uri. */ public java.lang.String getUri() { java.lang.Object ref = uri_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); uri_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * URI of the uploaded npm package. * </pre> * * <code>string uri = 1;</code> * * @return The bytes for uri. */ public com.google.protobuf.ByteString getUriBytes() { java.lang.Object ref = uri_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); uri_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * URI of the uploaded npm package. * </pre> * * <code>string uri = 1;</code> * * @param value The uri to set. * @return This builder for chaining. 
*/ public Builder setUri(java.lang.String value) { if (value == null) { throw new NullPointerException(); } uri_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * URI of the uploaded npm package. * </pre> * * <code>string uri = 1;</code> * * @return This builder for chaining. */ public Builder clearUri() { uri_ = getDefaultInstance().getUri(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * URI of the uploaded npm package. * </pre> * * <code>string uri = 1;</code> * * @param value The bytes for uri to set. * @return This builder for chaining. */ public Builder setUriBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); uri_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.cloudbuild.v1.FileHashes fileHashes_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloudbuild.v1.FileHashes, com.google.cloudbuild.v1.FileHashes.Builder, com.google.cloudbuild.v1.FileHashesOrBuilder> fileHashesBuilder_; /** * * * <pre> * Hash types and values of the npm package. * </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> * * @return Whether the fileHashes field is set. */ public boolean hasFileHashes() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Hash types and values of the npm package. * </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> * * @return The fileHashes. */ public com.google.cloudbuild.v1.FileHashes getFileHashes() { if (fileHashesBuilder_ == null) { return fileHashes_ == null ? com.google.cloudbuild.v1.FileHashes.getDefaultInstance() : fileHashes_; } else { return fileHashesBuilder_.getMessage(); } } /** * * * <pre> * Hash types and values of the npm package. 
* </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> */ public Builder setFileHashes(com.google.cloudbuild.v1.FileHashes value) { if (fileHashesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } fileHashes_ = value; } else { fileHashesBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Hash types and values of the npm package. * </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> */ public Builder setFileHashes(com.google.cloudbuild.v1.FileHashes.Builder builderForValue) { if (fileHashesBuilder_ == null) { fileHashes_ = builderForValue.build(); } else { fileHashesBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Hash types and values of the npm package. * </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> */ public Builder mergeFileHashes(com.google.cloudbuild.v1.FileHashes value) { if (fileHashesBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && fileHashes_ != null && fileHashes_ != com.google.cloudbuild.v1.FileHashes.getDefaultInstance()) { getFileHashesBuilder().mergeFrom(value); } else { fileHashes_ = value; } } else { fileHashesBuilder_.mergeFrom(value); } if (fileHashes_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Hash types and values of the npm package. * </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> */ public Builder clearFileHashes() { bitField0_ = (bitField0_ & ~0x00000002); fileHashes_ = null; if (fileHashesBuilder_ != null) { fileHashesBuilder_.dispose(); fileHashesBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Hash types and values of the npm package. 
* </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> */ public com.google.cloudbuild.v1.FileHashes.Builder getFileHashesBuilder() { bitField0_ |= 0x00000002; onChanged(); return getFileHashesFieldBuilder().getBuilder(); } /** * * * <pre> * Hash types and values of the npm package. * </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> */ public com.google.cloudbuild.v1.FileHashesOrBuilder getFileHashesOrBuilder() { if (fileHashesBuilder_ != null) { return fileHashesBuilder_.getMessageOrBuilder(); } else { return fileHashes_ == null ? com.google.cloudbuild.v1.FileHashes.getDefaultInstance() : fileHashes_; } } /** * * * <pre> * Hash types and values of the npm package. * </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloudbuild.v1.FileHashes, com.google.cloudbuild.v1.FileHashes.Builder, com.google.cloudbuild.v1.FileHashesOrBuilder> getFileHashesFieldBuilder() { if (fileHashesBuilder_ == null) { fileHashesBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloudbuild.v1.FileHashes, com.google.cloudbuild.v1.FileHashes.Builder, com.google.cloudbuild.v1.FileHashesOrBuilder>( getFileHashes(), getParentForChildren(), isClean()); fileHashes_ = null; } return fileHashesBuilder_; } private com.google.cloudbuild.v1.TimeSpan pushTiming_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloudbuild.v1.TimeSpan, com.google.cloudbuild.v1.TimeSpan.Builder, com.google.cloudbuild.v1.TimeSpanOrBuilder> pushTimingBuilder_; /** * * * <pre> * Output only. Stores timing information for pushing the specified artifact. * </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the pushTiming field is set. 
*/ public boolean hasPushTiming() { return ((bitField0_ & 0x00000004) != 0); } /** * * * <pre> * Output only. Stores timing information for pushing the specified artifact. * </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The pushTiming. */ public com.google.cloudbuild.v1.TimeSpan getPushTiming() { if (pushTimingBuilder_ == null) { return pushTiming_ == null ? com.google.cloudbuild.v1.TimeSpan.getDefaultInstance() : pushTiming_; } else { return pushTimingBuilder_.getMessage(); } } /** * * * <pre> * Output only. Stores timing information for pushing the specified artifact. * </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder setPushTiming(com.google.cloudbuild.v1.TimeSpan value) { if (pushTimingBuilder_ == null) { if (value == null) { throw new NullPointerException(); } pushTiming_ = value; } else { pushTimingBuilder_.setMessage(value); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Output only. Stores timing information for pushing the specified artifact. * </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder setPushTiming(com.google.cloudbuild.v1.TimeSpan.Builder builderForValue) { if (pushTimingBuilder_ == null) { pushTiming_ = builderForValue.build(); } else { pushTimingBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Output only. Stores timing information for pushing the specified artifact. 
* </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder mergePushTiming(com.google.cloudbuild.v1.TimeSpan value) { if (pushTimingBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0) && pushTiming_ != null && pushTiming_ != com.google.cloudbuild.v1.TimeSpan.getDefaultInstance()) { getPushTimingBuilder().mergeFrom(value); } else { pushTiming_ = value; } } else { pushTimingBuilder_.mergeFrom(value); } if (pushTiming_ != null) { bitField0_ |= 0x00000004; onChanged(); } return this; } /** * * * <pre> * Output only. Stores timing information for pushing the specified artifact. * </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder clearPushTiming() { bitField0_ = (bitField0_ & ~0x00000004); pushTiming_ = null; if (pushTimingBuilder_ != null) { pushTimingBuilder_.dispose(); pushTimingBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Output only. Stores timing information for pushing the specified artifact. * </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public com.google.cloudbuild.v1.TimeSpan.Builder getPushTimingBuilder() { bitField0_ |= 0x00000004; onChanged(); return getPushTimingFieldBuilder().getBuilder(); } /** * * * <pre> * Output only. Stores timing information for pushing the specified artifact. * </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public com.google.cloudbuild.v1.TimeSpanOrBuilder getPushTimingOrBuilder() { if (pushTimingBuilder_ != null) { return pushTimingBuilder_.getMessageOrBuilder(); } else { return pushTiming_ == null ? com.google.cloudbuild.v1.TimeSpan.getDefaultInstance() : pushTiming_; } } /** * * * <pre> * Output only. 
Stores timing information for pushing the specified artifact. * </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloudbuild.v1.TimeSpan, com.google.cloudbuild.v1.TimeSpan.Builder, com.google.cloudbuild.v1.TimeSpanOrBuilder> getPushTimingFieldBuilder() { if (pushTimingBuilder_ == null) { pushTimingBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloudbuild.v1.TimeSpan, com.google.cloudbuild.v1.TimeSpan.Builder, com.google.cloudbuild.v1.TimeSpanOrBuilder>( getPushTiming(), getParentForChildren(), isClean()); pushTiming_ = null; } return pushTimingBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.devtools.cloudbuild.v1.UploadedNpmPackage) } // @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.UploadedNpmPackage) private static final com.google.cloudbuild.v1.UploadedNpmPackage DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloudbuild.v1.UploadedNpmPackage(); } public static com.google.cloudbuild.v1.UploadedNpmPackage getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UploadedNpmPackage> PARSER = new com.google.protobuf.AbstractParser<UploadedNpmPackage>() { @java.lang.Override public UploadedNpmPackage parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UploadedNpmPackage> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UploadedNpmPackage> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloudbuild.v1.UploadedNpmPackage getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,097
java-artifact-registry/proto-google-cloud-artifact-registry-v1beta2/src/main/java/com/google/devtools/artifactregistry/v1beta2/ListPackagesResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/devtools/artifactregistry/v1beta2/package.proto // Protobuf Java Version: 3.25.8 package com.google.devtools.artifactregistry.v1beta2; /** * * * <pre> * The response from listing packages. * </pre> * * Protobuf type {@code google.devtools.artifactregistry.v1beta2.ListPackagesResponse} */ public final class ListPackagesResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.devtools.artifactregistry.v1beta2.ListPackagesResponse) ListPackagesResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListPackagesResponse.newBuilder() to construct. 
private ListPackagesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListPackagesResponse() { packages_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListPackagesResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.devtools.artifactregistry.v1beta2.PackageProto .internal_static_google_devtools_artifactregistry_v1beta2_ListPackagesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.devtools.artifactregistry.v1beta2.PackageProto .internal_static_google_devtools_artifactregistry_v1beta2_ListPackagesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse.class, com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse.Builder.class); } public static final int PACKAGES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.devtools.artifactregistry.v1beta2.Package> packages_; /** * * * <pre> * The packages returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Package packages = 1;</code> */ @java.lang.Override public java.util.List<com.google.devtools.artifactregistry.v1beta2.Package> getPackagesList() { return packages_; } /** * * * <pre> * The packages returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Package packages = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.devtools.artifactregistry.v1beta2.PackageOrBuilder> getPackagesOrBuilderList() { return packages_; } /** * * * <pre> * The packages returned. 
* </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Package packages = 1;</code> */ @java.lang.Override public int getPackagesCount() { return packages_.size(); } /** * * * <pre> * The packages returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Package packages = 1;</code> */ @java.lang.Override public com.google.devtools.artifactregistry.v1beta2.Package getPackages(int index) { return packages_.get(index); } /** * * * <pre> * The packages returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Package packages = 1;</code> */ @java.lang.Override public com.google.devtools.artifactregistry.v1beta2.PackageOrBuilder getPackagesOrBuilder( int index) { return packages_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * The token to retrieve the next page of packages, or empty if there are no * more packages to return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * The token to retrieve the next page of packages, or empty if there are no * more packages to return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < packages_.size(); i++) { output.writeMessage(1, packages_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < packages_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, packages_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse)) { return super.equals(obj); } com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse other = (com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse) obj; if (!getPackagesList().equals(other.getPackagesList())) return false; if 
(!getNextPageToken().equals(other.getNextPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getPackagesCount() > 0) { hash = (37 * hash) + PACKAGES_FIELD_NUMBER; hash = (53 * hash) + getPackagesList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse parseFrom( byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { 
return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The response from listing packages. * </pre> * * Protobuf type {@code google.devtools.artifactregistry.v1beta2.ListPackagesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.devtools.artifactregistry.v1beta2.ListPackagesResponse) com.google.devtools.artifactregistry.v1beta2.ListPackagesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.devtools.artifactregistry.v1beta2.PackageProto .internal_static_google_devtools_artifactregistry_v1beta2_ListPackagesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.devtools.artifactregistry.v1beta2.PackageProto .internal_static_google_devtools_artifactregistry_v1beta2_ListPackagesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse.class, com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse.Builder.class); } // Construct using // com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (packagesBuilder_ 
== null) { packages_ = java.util.Collections.emptyList(); } else { packages_ = null; packagesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.devtools.artifactregistry.v1beta2.PackageProto .internal_static_google_devtools_artifactregistry_v1beta2_ListPackagesResponse_descriptor; } @java.lang.Override public com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse getDefaultInstanceForType() { return com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse.getDefaultInstance(); } @java.lang.Override public com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse build() { com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse buildPartial() { com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse result = new com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse result) { if (packagesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { packages_ = java.util.Collections.unmodifiableList(packages_); bitField0_ = (bitField0_ & ~0x00000001); } result.packages_ = packages_; } else { result.packages_ = packagesBuilder_.build(); } } private void buildPartial0( com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return 
super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse) { return mergeFrom((com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse other) { if (other == com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse.getDefaultInstance()) return this; if (packagesBuilder_ == null) { if (!other.packages_.isEmpty()) { if (packages_.isEmpty()) { packages_ = other.packages_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensurePackagesIsMutable(); packages_.addAll(other.packages_); } onChanged(); } } else { if (!other.packages_.isEmpty()) { if (packagesBuilder_.isEmpty()) { packagesBuilder_.dispose(); packagesBuilder_ = null; packages_ = other.packages_; bitField0_ = (bitField0_ & ~0x00000001); packagesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getPackagesFieldBuilder() : null; } else { packagesBuilder_.addAllMessages(other.packages_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.devtools.artifactregistry.v1beta2.Package m = input.readMessage( com.google.devtools.artifactregistry.v1beta2.Package.parser(), extensionRegistry); if (packagesBuilder_ == null) { ensurePackagesIsMutable(); packages_.add(m); } else { packagesBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.devtools.artifactregistry.v1beta2.Package> packages_ = java.util.Collections.emptyList(); private void ensurePackagesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { packages_ = new java.util.ArrayList<com.google.devtools.artifactregistry.v1beta2.Package>( packages_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.devtools.artifactregistry.v1beta2.Package, 
com.google.devtools.artifactregistry.v1beta2.Package.Builder, com.google.devtools.artifactregistry.v1beta2.PackageOrBuilder> packagesBuilder_; /** * * * <pre> * The packages returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Package packages = 1;</code> */ public java.util.List<com.google.devtools.artifactregistry.v1beta2.Package> getPackagesList() { if (packagesBuilder_ == null) { return java.util.Collections.unmodifiableList(packages_); } else { return packagesBuilder_.getMessageList(); } } /** * * * <pre> * The packages returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Package packages = 1;</code> */ public int getPackagesCount() { if (packagesBuilder_ == null) { return packages_.size(); } else { return packagesBuilder_.getCount(); } } /** * * * <pre> * The packages returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Package packages = 1;</code> */ public com.google.devtools.artifactregistry.v1beta2.Package getPackages(int index) { if (packagesBuilder_ == null) { return packages_.get(index); } else { return packagesBuilder_.getMessage(index); } } /** * * * <pre> * The packages returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Package packages = 1;</code> */ public Builder setPackages( int index, com.google.devtools.artifactregistry.v1beta2.Package value) { if (packagesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePackagesIsMutable(); packages_.set(index, value); onChanged(); } else { packagesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The packages returned. 
* </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Package packages = 1;</code> */ public Builder setPackages( int index, com.google.devtools.artifactregistry.v1beta2.Package.Builder builderForValue) { if (packagesBuilder_ == null) { ensurePackagesIsMutable(); packages_.set(index, builderForValue.build()); onChanged(); } else { packagesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The packages returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Package packages = 1;</code> */ public Builder addPackages(com.google.devtools.artifactregistry.v1beta2.Package value) { if (packagesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePackagesIsMutable(); packages_.add(value); onChanged(); } else { packagesBuilder_.addMessage(value); } return this; } /** * * * <pre> * The packages returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Package packages = 1;</code> */ public Builder addPackages( int index, com.google.devtools.artifactregistry.v1beta2.Package value) { if (packagesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePackagesIsMutable(); packages_.add(index, value); onChanged(); } else { packagesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The packages returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Package packages = 1;</code> */ public Builder addPackages( com.google.devtools.artifactregistry.v1beta2.Package.Builder builderForValue) { if (packagesBuilder_ == null) { ensurePackagesIsMutable(); packages_.add(builderForValue.build()); onChanged(); } else { packagesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The packages returned. 
* </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Package packages = 1;</code> */ public Builder addPackages( int index, com.google.devtools.artifactregistry.v1beta2.Package.Builder builderForValue) { if (packagesBuilder_ == null) { ensurePackagesIsMutable(); packages_.add(index, builderForValue.build()); onChanged(); } else { packagesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The packages returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Package packages = 1;</code> */ public Builder addAllPackages( java.lang.Iterable<? extends com.google.devtools.artifactregistry.v1beta2.Package> values) { if (packagesBuilder_ == null) { ensurePackagesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, packages_); onChanged(); } else { packagesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The packages returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Package packages = 1;</code> */ public Builder clearPackages() { if (packagesBuilder_ == null) { packages_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { packagesBuilder_.clear(); } return this; } /** * * * <pre> * The packages returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Package packages = 1;</code> */ public Builder removePackages(int index) { if (packagesBuilder_ == null) { ensurePackagesIsMutable(); packages_.remove(index); onChanged(); } else { packagesBuilder_.remove(index); } return this; } /** * * * <pre> * The packages returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Package packages = 1;</code> */ public com.google.devtools.artifactregistry.v1beta2.Package.Builder getPackagesBuilder( int index) { return getPackagesFieldBuilder().getBuilder(index); } /** * * * <pre> * The packages returned. 
* </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Package packages = 1;</code> */ public com.google.devtools.artifactregistry.v1beta2.PackageOrBuilder getPackagesOrBuilder( int index) { if (packagesBuilder_ == null) { return packages_.get(index); } else { return packagesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The packages returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Package packages = 1;</code> */ public java.util.List<? extends com.google.devtools.artifactregistry.v1beta2.PackageOrBuilder> getPackagesOrBuilderList() { if (packagesBuilder_ != null) { return packagesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(packages_); } } /** * * * <pre> * The packages returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Package packages = 1;</code> */ public com.google.devtools.artifactregistry.v1beta2.Package.Builder addPackagesBuilder() { return getPackagesFieldBuilder() .addBuilder(com.google.devtools.artifactregistry.v1beta2.Package.getDefaultInstance()); } /** * * * <pre> * The packages returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Package packages = 1;</code> */ public com.google.devtools.artifactregistry.v1beta2.Package.Builder addPackagesBuilder( int index) { return getPackagesFieldBuilder() .addBuilder( index, com.google.devtools.artifactregistry.v1beta2.Package.getDefaultInstance()); } /** * * * <pre> * The packages returned. 
* </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Package packages = 1;</code> */ public java.util.List<com.google.devtools.artifactregistry.v1beta2.Package.Builder> getPackagesBuilderList() { return getPackagesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.devtools.artifactregistry.v1beta2.Package, com.google.devtools.artifactregistry.v1beta2.Package.Builder, com.google.devtools.artifactregistry.v1beta2.PackageOrBuilder> getPackagesFieldBuilder() { if (packagesBuilder_ == null) { packagesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.devtools.artifactregistry.v1beta2.Package, com.google.devtools.artifactregistry.v1beta2.Package.Builder, com.google.devtools.artifactregistry.v1beta2.PackageOrBuilder>( packages_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); packages_ = null; } return packagesBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * The token to retrieve the next page of packages, or empty if there are no * more packages to return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The token to retrieve the next page of packages, or empty if there are no * more packages to return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The token to retrieve the next page of packages, or empty if there are no * more packages to return. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The token to retrieve the next page of packages, or empty if there are no * more packages to return. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * The token to retrieve the next page of packages, or empty if there are no * more packages to return. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.devtools.artifactregistry.v1beta2.ListPackagesResponse) } // @@protoc_insertion_point(class_scope:google.devtools.artifactregistry.v1beta2.ListPackagesResponse) private static final com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse(); } public static com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListPackagesResponse> PARSER = new com.google.protobuf.AbstractParser<ListPackagesResponse>() { @java.lang.Override public ListPackagesResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) 
.setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListPackagesResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListPackagesResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.devtools.artifactregistry.v1beta2.ListPackagesResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,097
java-artifact-registry/proto-google-cloud-artifact-registry-v1beta2/src/main/java/com/google/devtools/artifactregistry/v1beta2/ListVersionsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/devtools/artifactregistry/v1beta2/version.proto // Protobuf Java Version: 3.25.8 package com.google.devtools.artifactregistry.v1beta2; /** * * * <pre> * The response from listing versions. * </pre> * * Protobuf type {@code google.devtools.artifactregistry.v1beta2.ListVersionsResponse} */ public final class ListVersionsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.devtools.artifactregistry.v1beta2.ListVersionsResponse) ListVersionsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListVersionsResponse.newBuilder() to construct. 
private ListVersionsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListVersionsResponse() { versions_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListVersionsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.devtools.artifactregistry.v1beta2.VersionProto .internal_static_google_devtools_artifactregistry_v1beta2_ListVersionsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.devtools.artifactregistry.v1beta2.VersionProto .internal_static_google_devtools_artifactregistry_v1beta2_ListVersionsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse.class, com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse.Builder.class); } public static final int VERSIONS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.devtools.artifactregistry.v1beta2.Version> versions_; /** * * * <pre> * The versions returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Version versions = 1;</code> */ @java.lang.Override public java.util.List<com.google.devtools.artifactregistry.v1beta2.Version> getVersionsList() { return versions_; } /** * * * <pre> * The versions returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Version versions = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.devtools.artifactregistry.v1beta2.VersionOrBuilder> getVersionsOrBuilderList() { return versions_; } /** * * * <pre> * The versions returned. 
* </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Version versions = 1;</code> */ @java.lang.Override public int getVersionsCount() { return versions_.size(); } /** * * * <pre> * The versions returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Version versions = 1;</code> */ @java.lang.Override public com.google.devtools.artifactregistry.v1beta2.Version getVersions(int index) { return versions_.get(index); } /** * * * <pre> * The versions returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Version versions = 1;</code> */ @java.lang.Override public com.google.devtools.artifactregistry.v1beta2.VersionOrBuilder getVersionsOrBuilder( int index) { return versions_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * The token to retrieve the next page of versions, or empty if there are no * more versions to return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * The token to retrieve the next page of versions, or empty if there are no * more versions to return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < versions_.size(); i++) { output.writeMessage(1, versions_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < versions_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, versions_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse)) { return super.equals(obj); } com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse other = (com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse) obj; if (!getVersionsList().equals(other.getVersionsList())) return false; if 
(!getNextPageToken().equals(other.getNextPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getVersionsCount() > 0) { hash = (37 * hash) + VERSIONS_FIELD_NUMBER; hash = (53 * hash) + getVersionsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse parseFrom( byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { 
return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The response from listing versions. * </pre> * * Protobuf type {@code google.devtools.artifactregistry.v1beta2.ListVersionsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.devtools.artifactregistry.v1beta2.ListVersionsResponse) com.google.devtools.artifactregistry.v1beta2.ListVersionsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.devtools.artifactregistry.v1beta2.VersionProto .internal_static_google_devtools_artifactregistry_v1beta2_ListVersionsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.devtools.artifactregistry.v1beta2.VersionProto .internal_static_google_devtools_artifactregistry_v1beta2_ListVersionsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse.class, com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse.Builder.class); } // Construct using // com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (versionsBuilder_ 
== null) { versions_ = java.util.Collections.emptyList(); } else { versions_ = null; versionsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.devtools.artifactregistry.v1beta2.VersionProto .internal_static_google_devtools_artifactregistry_v1beta2_ListVersionsResponse_descriptor; } @java.lang.Override public com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse getDefaultInstanceForType() { return com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse.getDefaultInstance(); } @java.lang.Override public com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse build() { com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse buildPartial() { com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse result = new com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse result) { if (versionsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { versions_ = java.util.Collections.unmodifiableList(versions_); bitField0_ = (bitField0_ & ~0x00000001); } result.versions_ = versions_; } else { result.versions_ = versionsBuilder_.build(); } } private void buildPartial0( com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return 
super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse) { return mergeFrom((com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse other) { if (other == com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse.getDefaultInstance()) return this; if (versionsBuilder_ == null) { if (!other.versions_.isEmpty()) { if (versions_.isEmpty()) { versions_ = other.versions_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureVersionsIsMutable(); versions_.addAll(other.versions_); } onChanged(); } } else { if (!other.versions_.isEmpty()) { if (versionsBuilder_.isEmpty()) { versionsBuilder_.dispose(); versionsBuilder_ = null; versions_ = other.versions_; bitField0_ = (bitField0_ & ~0x00000001); versionsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getVersionsFieldBuilder() : null; } else { versionsBuilder_.addAllMessages(other.versions_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.devtools.artifactregistry.v1beta2.Version m = input.readMessage( com.google.devtools.artifactregistry.v1beta2.Version.parser(), extensionRegistry); if (versionsBuilder_ == null) { ensureVersionsIsMutable(); versions_.add(m); } else { versionsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.devtools.artifactregistry.v1beta2.Version> versions_ = java.util.Collections.emptyList(); private void ensureVersionsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { versions_ = new java.util.ArrayList<com.google.devtools.artifactregistry.v1beta2.Version>( versions_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.devtools.artifactregistry.v1beta2.Version, 
com.google.devtools.artifactregistry.v1beta2.Version.Builder, com.google.devtools.artifactregistry.v1beta2.VersionOrBuilder> versionsBuilder_; /** * * * <pre> * The versions returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Version versions = 1;</code> */ public java.util.List<com.google.devtools.artifactregistry.v1beta2.Version> getVersionsList() { if (versionsBuilder_ == null) { return java.util.Collections.unmodifiableList(versions_); } else { return versionsBuilder_.getMessageList(); } } /** * * * <pre> * The versions returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Version versions = 1;</code> */ public int getVersionsCount() { if (versionsBuilder_ == null) { return versions_.size(); } else { return versionsBuilder_.getCount(); } } /** * * * <pre> * The versions returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Version versions = 1;</code> */ public com.google.devtools.artifactregistry.v1beta2.Version getVersions(int index) { if (versionsBuilder_ == null) { return versions_.get(index); } else { return versionsBuilder_.getMessage(index); } } /** * * * <pre> * The versions returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Version versions = 1;</code> */ public Builder setVersions( int index, com.google.devtools.artifactregistry.v1beta2.Version value) { if (versionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureVersionsIsMutable(); versions_.set(index, value); onChanged(); } else { versionsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The versions returned. 
* </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Version versions = 1;</code> */ public Builder setVersions( int index, com.google.devtools.artifactregistry.v1beta2.Version.Builder builderForValue) { if (versionsBuilder_ == null) { ensureVersionsIsMutable(); versions_.set(index, builderForValue.build()); onChanged(); } else { versionsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The versions returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Version versions = 1;</code> */ public Builder addVersions(com.google.devtools.artifactregistry.v1beta2.Version value) { if (versionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureVersionsIsMutable(); versions_.add(value); onChanged(); } else { versionsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The versions returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Version versions = 1;</code> */ public Builder addVersions( int index, com.google.devtools.artifactregistry.v1beta2.Version value) { if (versionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureVersionsIsMutable(); versions_.add(index, value); onChanged(); } else { versionsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The versions returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Version versions = 1;</code> */ public Builder addVersions( com.google.devtools.artifactregistry.v1beta2.Version.Builder builderForValue) { if (versionsBuilder_ == null) { ensureVersionsIsMutable(); versions_.add(builderForValue.build()); onChanged(); } else { versionsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The versions returned. 
* </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Version versions = 1;</code> */ public Builder addVersions( int index, com.google.devtools.artifactregistry.v1beta2.Version.Builder builderForValue) { if (versionsBuilder_ == null) { ensureVersionsIsMutable(); versions_.add(index, builderForValue.build()); onChanged(); } else { versionsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The versions returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Version versions = 1;</code> */ public Builder addAllVersions( java.lang.Iterable<? extends com.google.devtools.artifactregistry.v1beta2.Version> values) { if (versionsBuilder_ == null) { ensureVersionsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, versions_); onChanged(); } else { versionsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The versions returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Version versions = 1;</code> */ public Builder clearVersions() { if (versionsBuilder_ == null) { versions_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { versionsBuilder_.clear(); } return this; } /** * * * <pre> * The versions returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Version versions = 1;</code> */ public Builder removeVersions(int index) { if (versionsBuilder_ == null) { ensureVersionsIsMutable(); versions_.remove(index); onChanged(); } else { versionsBuilder_.remove(index); } return this; } /** * * * <pre> * The versions returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Version versions = 1;</code> */ public com.google.devtools.artifactregistry.v1beta2.Version.Builder getVersionsBuilder( int index) { return getVersionsFieldBuilder().getBuilder(index); } /** * * * <pre> * The versions returned. 
* </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Version versions = 1;</code> */ public com.google.devtools.artifactregistry.v1beta2.VersionOrBuilder getVersionsOrBuilder( int index) { if (versionsBuilder_ == null) { return versions_.get(index); } else { return versionsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The versions returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Version versions = 1;</code> */ public java.util.List<? extends com.google.devtools.artifactregistry.v1beta2.VersionOrBuilder> getVersionsOrBuilderList() { if (versionsBuilder_ != null) { return versionsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(versions_); } } /** * * * <pre> * The versions returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Version versions = 1;</code> */ public com.google.devtools.artifactregistry.v1beta2.Version.Builder addVersionsBuilder() { return getVersionsFieldBuilder() .addBuilder(com.google.devtools.artifactregistry.v1beta2.Version.getDefaultInstance()); } /** * * * <pre> * The versions returned. * </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Version versions = 1;</code> */ public com.google.devtools.artifactregistry.v1beta2.Version.Builder addVersionsBuilder( int index) { return getVersionsFieldBuilder() .addBuilder( index, com.google.devtools.artifactregistry.v1beta2.Version.getDefaultInstance()); } /** * * * <pre> * The versions returned. 
* </pre> * * <code>repeated .google.devtools.artifactregistry.v1beta2.Version versions = 1;</code> */ public java.util.List<com.google.devtools.artifactregistry.v1beta2.Version.Builder> getVersionsBuilderList() { return getVersionsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.devtools.artifactregistry.v1beta2.Version, com.google.devtools.artifactregistry.v1beta2.Version.Builder, com.google.devtools.artifactregistry.v1beta2.VersionOrBuilder> getVersionsFieldBuilder() { if (versionsBuilder_ == null) { versionsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.devtools.artifactregistry.v1beta2.Version, com.google.devtools.artifactregistry.v1beta2.Version.Builder, com.google.devtools.artifactregistry.v1beta2.VersionOrBuilder>( versions_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); versions_ = null; } return versionsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * The token to retrieve the next page of versions, or empty if there are no * more versions to return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The token to retrieve the next page of versions, or empty if there are no * more versions to return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The token to retrieve the next page of versions, or empty if there are no * more versions to return. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The token to retrieve the next page of versions, or empty if there are no * more versions to return. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * The token to retrieve the next page of versions, or empty if there are no * more versions to return. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.devtools.artifactregistry.v1beta2.ListVersionsResponse) } // @@protoc_insertion_point(class_scope:google.devtools.artifactregistry.v1beta2.ListVersionsResponse) private static final com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse(); } public static com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListVersionsResponse> PARSER = new com.google.protobuf.AbstractParser<ListVersionsResponse>() { @java.lang.Override public ListVersionsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) 
.setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListVersionsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListVersionsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.devtools.artifactregistry.v1beta2.ListVersionsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,128
java-orgpolicy/proto-google-cloud-orgpolicy-v2/src/main/java/com/google/cloud/orgpolicy/v2/ListPoliciesResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/orgpolicy/v2/orgpolicy.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.orgpolicy.v2; /** * * * <pre> * The response returned from the [ListPolicies] * [google.cloud.orgpolicy.v2.OrgPolicy.ListPolicies] method. It will be empty * if no policies are set on the resource. * </pre> * * Protobuf type {@code google.cloud.orgpolicy.v2.ListPoliciesResponse} */ public final class ListPoliciesResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.orgpolicy.v2.ListPoliciesResponse) ListPoliciesResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListPoliciesResponse.newBuilder() to construct. 
private ListPoliciesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListPoliciesResponse() { policies_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListPoliciesResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.orgpolicy.v2.OrgPolicyProto .internal_static_google_cloud_orgpolicy_v2_ListPoliciesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.orgpolicy.v2.OrgPolicyProto .internal_static_google_cloud_orgpolicy_v2_ListPoliciesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.orgpolicy.v2.ListPoliciesResponse.class, com.google.cloud.orgpolicy.v2.ListPoliciesResponse.Builder.class); } public static final int POLICIES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.orgpolicy.v2.Policy> policies_; /** * * * <pre> * All policies that exist on the resource. It will be empty if no * policies are set. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Policy policies = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.orgpolicy.v2.Policy> getPoliciesList() { return policies_; } /** * * * <pre> * All policies that exist on the resource. It will be empty if no * policies are set. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Policy policies = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.orgpolicy.v2.PolicyOrBuilder> getPoliciesOrBuilderList() { return policies_; } /** * * * <pre> * All policies that exist on the resource. It will be empty if no * policies are set. 
* </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Policy policies = 1;</code> */ @java.lang.Override public int getPoliciesCount() { return policies_.size(); } /** * * * <pre> * All policies that exist on the resource. It will be empty if no * policies are set. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Policy policies = 1;</code> */ @java.lang.Override public com.google.cloud.orgpolicy.v2.Policy getPolicies(int index) { return policies_.get(index); } /** * * * <pre> * All policies that exist on the resource. It will be empty if no * policies are set. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Policy policies = 1;</code> */ @java.lang.Override public com.google.cloud.orgpolicy.v2.PolicyOrBuilder getPoliciesOrBuilder(int index) { return policies_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Page token used to retrieve the next page. This is currently not used, but * the server may at any point start supplying a valid token. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * Page token used to retrieve the next page. This is currently not used, but * the server may at any point start supplying a valid token. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < policies_.size(); i++) { output.writeMessage(1, policies_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < policies_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, policies_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.orgpolicy.v2.ListPoliciesResponse)) { return super.equals(obj); } com.google.cloud.orgpolicy.v2.ListPoliciesResponse other = (com.google.cloud.orgpolicy.v2.ListPoliciesResponse) obj; if (!getPoliciesList().equals(other.getPoliciesList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getPoliciesCount() > 0) { hash = (37 * hash) + POLICIES_FIELD_NUMBER; hash = (53 * hash) + getPoliciesList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.orgpolicy.v2.ListPoliciesResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.orgpolicy.v2.ListPoliciesResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.orgpolicy.v2.ListPoliciesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.orgpolicy.v2.ListPoliciesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.orgpolicy.v2.ListPoliciesResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.orgpolicy.v2.ListPoliciesResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.orgpolicy.v2.ListPoliciesResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.orgpolicy.v2.ListPoliciesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.orgpolicy.v2.ListPoliciesResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.orgpolicy.v2.ListPoliciesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.orgpolicy.v2.ListPoliciesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.orgpolicy.v2.ListPoliciesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.orgpolicy.v2.ListPoliciesResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The response returned from the [ListPolicies] * [google.cloud.orgpolicy.v2.OrgPolicy.ListPolicies] method. It will be empty * if no policies are set on the resource. * </pre> * * Protobuf type {@code google.cloud.orgpolicy.v2.ListPoliciesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.orgpolicy.v2.ListPoliciesResponse) com.google.cloud.orgpolicy.v2.ListPoliciesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.orgpolicy.v2.OrgPolicyProto .internal_static_google_cloud_orgpolicy_v2_ListPoliciesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.orgpolicy.v2.OrgPolicyProto .internal_static_google_cloud_orgpolicy_v2_ListPoliciesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.orgpolicy.v2.ListPoliciesResponse.class, com.google.cloud.orgpolicy.v2.ListPoliciesResponse.Builder.class); } // Construct using com.google.cloud.orgpolicy.v2.ListPoliciesResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (policiesBuilder_ == null) { policies_ = java.util.Collections.emptyList(); } else { policies_ = null; policiesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
com.google.cloud.orgpolicy.v2.OrgPolicyProto .internal_static_google_cloud_orgpolicy_v2_ListPoliciesResponse_descriptor; } @java.lang.Override public com.google.cloud.orgpolicy.v2.ListPoliciesResponse getDefaultInstanceForType() { return com.google.cloud.orgpolicy.v2.ListPoliciesResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.orgpolicy.v2.ListPoliciesResponse build() { com.google.cloud.orgpolicy.v2.ListPoliciesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.orgpolicy.v2.ListPoliciesResponse buildPartial() { com.google.cloud.orgpolicy.v2.ListPoliciesResponse result = new com.google.cloud.orgpolicy.v2.ListPoliciesResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.orgpolicy.v2.ListPoliciesResponse result) { if (policiesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { policies_ = java.util.Collections.unmodifiableList(policies_); bitField0_ = (bitField0_ & ~0x00000001); } result.policies_ = policies_; } else { result.policies_ = policiesBuilder_.build(); } } private void buildPartial0(com.google.cloud.orgpolicy.v2.ListPoliciesResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } 
@java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.orgpolicy.v2.ListPoliciesResponse) { return mergeFrom((com.google.cloud.orgpolicy.v2.ListPoliciesResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.orgpolicy.v2.ListPoliciesResponse other) { if (other == com.google.cloud.orgpolicy.v2.ListPoliciesResponse.getDefaultInstance()) return this; if (policiesBuilder_ == null) { if (!other.policies_.isEmpty()) { if (policies_.isEmpty()) { policies_ = other.policies_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensurePoliciesIsMutable(); policies_.addAll(other.policies_); } onChanged(); } } else { if (!other.policies_.isEmpty()) { if (policiesBuilder_.isEmpty()) { policiesBuilder_.dispose(); policiesBuilder_ = null; policies_ = other.policies_; bitField0_ = (bitField0_ & ~0x00000001); policiesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getPoliciesFieldBuilder() : null; } else { policiesBuilder_.addAllMessages(other.policies_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.orgpolicy.v2.Policy m = input.readMessage( com.google.cloud.orgpolicy.v2.Policy.parser(), extensionRegistry); if (policiesBuilder_ == null) { ensurePoliciesIsMutable(); policies_.add(m); } else { policiesBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.orgpolicy.v2.Policy> policies_ = java.util.Collections.emptyList(); private void ensurePoliciesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { policies_ = new java.util.ArrayList<com.google.cloud.orgpolicy.v2.Policy>(policies_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.orgpolicy.v2.Policy, com.google.cloud.orgpolicy.v2.Policy.Builder, com.google.cloud.orgpolicy.v2.PolicyOrBuilder> policiesBuilder_; /** * * * <pre> * 
All policies that exist on the resource. It will be empty if no * policies are set. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Policy policies = 1;</code> */ public java.util.List<com.google.cloud.orgpolicy.v2.Policy> getPoliciesList() { if (policiesBuilder_ == null) { return java.util.Collections.unmodifiableList(policies_); } else { return policiesBuilder_.getMessageList(); } } /** * * * <pre> * All policies that exist on the resource. It will be empty if no * policies are set. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Policy policies = 1;</code> */ public int getPoliciesCount() { if (policiesBuilder_ == null) { return policies_.size(); } else { return policiesBuilder_.getCount(); } } /** * * * <pre> * All policies that exist on the resource. It will be empty if no * policies are set. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Policy policies = 1;</code> */ public com.google.cloud.orgpolicy.v2.Policy getPolicies(int index) { if (policiesBuilder_ == null) { return policies_.get(index); } else { return policiesBuilder_.getMessage(index); } } /** * * * <pre> * All policies that exist on the resource. It will be empty if no * policies are set. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Policy policies = 1;</code> */ public Builder setPolicies(int index, com.google.cloud.orgpolicy.v2.Policy value) { if (policiesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePoliciesIsMutable(); policies_.set(index, value); onChanged(); } else { policiesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * All policies that exist on the resource. It will be empty if no * policies are set. 
* </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Policy policies = 1;</code> */ public Builder setPolicies( int index, com.google.cloud.orgpolicy.v2.Policy.Builder builderForValue) { if (policiesBuilder_ == null) { ensurePoliciesIsMutable(); policies_.set(index, builderForValue.build()); onChanged(); } else { policiesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * All policies that exist on the resource. It will be empty if no * policies are set. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Policy policies = 1;</code> */ public Builder addPolicies(com.google.cloud.orgpolicy.v2.Policy value) { if (policiesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePoliciesIsMutable(); policies_.add(value); onChanged(); } else { policiesBuilder_.addMessage(value); } return this; } /** * * * <pre> * All policies that exist on the resource. It will be empty if no * policies are set. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Policy policies = 1;</code> */ public Builder addPolicies(int index, com.google.cloud.orgpolicy.v2.Policy value) { if (policiesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePoliciesIsMutable(); policies_.add(index, value); onChanged(); } else { policiesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * All policies that exist on the resource. It will be empty if no * policies are set. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Policy policies = 1;</code> */ public Builder addPolicies(com.google.cloud.orgpolicy.v2.Policy.Builder builderForValue) { if (policiesBuilder_ == null) { ensurePoliciesIsMutable(); policies_.add(builderForValue.build()); onChanged(); } else { policiesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * All policies that exist on the resource. It will be empty if no * policies are set. 
* </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Policy policies = 1;</code> */ public Builder addPolicies( int index, com.google.cloud.orgpolicy.v2.Policy.Builder builderForValue) { if (policiesBuilder_ == null) { ensurePoliciesIsMutable(); policies_.add(index, builderForValue.build()); onChanged(); } else { policiesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * All policies that exist on the resource. It will be empty if no * policies are set. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Policy policies = 1;</code> */ public Builder addAllPolicies( java.lang.Iterable<? extends com.google.cloud.orgpolicy.v2.Policy> values) { if (policiesBuilder_ == null) { ensurePoliciesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, policies_); onChanged(); } else { policiesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * All policies that exist on the resource. It will be empty if no * policies are set. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Policy policies = 1;</code> */ public Builder clearPolicies() { if (policiesBuilder_ == null) { policies_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { policiesBuilder_.clear(); } return this; } /** * * * <pre> * All policies that exist on the resource. It will be empty if no * policies are set. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Policy policies = 1;</code> */ public Builder removePolicies(int index) { if (policiesBuilder_ == null) { ensurePoliciesIsMutable(); policies_.remove(index); onChanged(); } else { policiesBuilder_.remove(index); } return this; } /** * * * <pre> * All policies that exist on the resource. It will be empty if no * policies are set. 
* </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Policy policies = 1;</code> */ public com.google.cloud.orgpolicy.v2.Policy.Builder getPoliciesBuilder(int index) { return getPoliciesFieldBuilder().getBuilder(index); } /** * * * <pre> * All policies that exist on the resource. It will be empty if no * policies are set. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Policy policies = 1;</code> */ public com.google.cloud.orgpolicy.v2.PolicyOrBuilder getPoliciesOrBuilder(int index) { if (policiesBuilder_ == null) { return policies_.get(index); } else { return policiesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * All policies that exist on the resource. It will be empty if no * policies are set. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Policy policies = 1;</code> */ public java.util.List<? extends com.google.cloud.orgpolicy.v2.PolicyOrBuilder> getPoliciesOrBuilderList() { if (policiesBuilder_ != null) { return policiesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(policies_); } } /** * * * <pre> * All policies that exist on the resource. It will be empty if no * policies are set. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Policy policies = 1;</code> */ public com.google.cloud.orgpolicy.v2.Policy.Builder addPoliciesBuilder() { return getPoliciesFieldBuilder() .addBuilder(com.google.cloud.orgpolicy.v2.Policy.getDefaultInstance()); } /** * * * <pre> * All policies that exist on the resource. It will be empty if no * policies are set. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Policy policies = 1;</code> */ public com.google.cloud.orgpolicy.v2.Policy.Builder addPoliciesBuilder(int index) { return getPoliciesFieldBuilder() .addBuilder(index, com.google.cloud.orgpolicy.v2.Policy.getDefaultInstance()); } /** * * * <pre> * All policies that exist on the resource. It will be empty if no * policies are set. 
* </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Policy policies = 1;</code> */ public java.util.List<com.google.cloud.orgpolicy.v2.Policy.Builder> getPoliciesBuilderList() { return getPoliciesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.orgpolicy.v2.Policy, com.google.cloud.orgpolicy.v2.Policy.Builder, com.google.cloud.orgpolicy.v2.PolicyOrBuilder> getPoliciesFieldBuilder() { if (policiesBuilder_ == null) { policiesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.orgpolicy.v2.Policy, com.google.cloud.orgpolicy.v2.Policy.Builder, com.google.cloud.orgpolicy.v2.PolicyOrBuilder>( policies_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); policies_ = null; } return policiesBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Page token used to retrieve the next page. This is currently not used, but * the server may at any point start supplying a valid token. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Page token used to retrieve the next page. This is currently not used, but * the server may at any point start supplying a valid token. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Page token used to retrieve the next page. This is currently not used, but * the server may at any point start supplying a valid token. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Page token used to retrieve the next page. This is currently not used, but * the server may at any point start supplying a valid token. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Page token used to retrieve the next page. This is currently not used, but * the server may at any point start supplying a valid token. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.orgpolicy.v2.ListPoliciesResponse) } // @@protoc_insertion_point(class_scope:google.cloud.orgpolicy.v2.ListPoliciesResponse) private static final com.google.cloud.orgpolicy.v2.ListPoliciesResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.orgpolicy.v2.ListPoliciesResponse(); } public static com.google.cloud.orgpolicy.v2.ListPoliciesResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListPoliciesResponse> PARSER = new com.google.protobuf.AbstractParser<ListPoliciesResponse>() { @java.lang.Override public ListPoliciesResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static 
com.google.protobuf.Parser<ListPoliciesResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListPoliciesResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.orgpolicy.v2.ListPoliciesResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,154
java-orgpolicy/proto-google-cloud-orgpolicy-v2/src/main/java/com/google/cloud/orgpolicy/v2/ListConstraintsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/orgpolicy/v2/orgpolicy.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.orgpolicy.v2; /** * * * <pre> * The response returned from the [ListConstraints] * [google.cloud.orgpolicy.v2.OrgPolicy.ListConstraints] method. * </pre> * * Protobuf type {@code google.cloud.orgpolicy.v2.ListConstraintsResponse} */ public final class ListConstraintsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.orgpolicy.v2.ListConstraintsResponse) ListConstraintsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListConstraintsResponse.newBuilder() to construct. 
private ListConstraintsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListConstraintsResponse() { constraints_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListConstraintsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.orgpolicy.v2.OrgPolicyProto .internal_static_google_cloud_orgpolicy_v2_ListConstraintsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.orgpolicy.v2.OrgPolicyProto .internal_static_google_cloud_orgpolicy_v2_ListConstraintsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.orgpolicy.v2.ListConstraintsResponse.class, com.google.cloud.orgpolicy.v2.ListConstraintsResponse.Builder.class); } public static final int CONSTRAINTS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.orgpolicy.v2.Constraint> constraints_; /** * * * <pre> * The collection of constraints that are available on the targeted resource. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Constraint constraints = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.orgpolicy.v2.Constraint> getConstraintsList() { return constraints_; } /** * * * <pre> * The collection of constraints that are available on the targeted resource. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Constraint constraints = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.orgpolicy.v2.ConstraintOrBuilder> getConstraintsOrBuilderList() { return constraints_; } /** * * * <pre> * The collection of constraints that are available on the targeted resource. 
* </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Constraint constraints = 1;</code> */ @java.lang.Override public int getConstraintsCount() { return constraints_.size(); } /** * * * <pre> * The collection of constraints that are available on the targeted resource. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Constraint constraints = 1;</code> */ @java.lang.Override public com.google.cloud.orgpolicy.v2.Constraint getConstraints(int index) { return constraints_.get(index); } /** * * * <pre> * The collection of constraints that are available on the targeted resource. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Constraint constraints = 1;</code> */ @java.lang.Override public com.google.cloud.orgpolicy.v2.ConstraintOrBuilder getConstraintsOrBuilder(int index) { return constraints_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Page token used to retrieve the next page. This is currently not used. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * Page token used to retrieve the next page. This is currently not used. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < constraints_.size(); i++) { output.writeMessage(1, constraints_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < constraints_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, constraints_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.orgpolicy.v2.ListConstraintsResponse)) { return super.equals(obj); } com.google.cloud.orgpolicy.v2.ListConstraintsResponse other = (com.google.cloud.orgpolicy.v2.ListConstraintsResponse) obj; if (!getConstraintsList().equals(other.getConstraintsList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) 
return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getConstraintsCount() > 0) { hash = (37 * hash) + CONSTRAINTS_FIELD_NUMBER; hash = (53 * hash) + getConstraintsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.orgpolicy.v2.ListConstraintsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.orgpolicy.v2.ListConstraintsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.orgpolicy.v2.ListConstraintsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.orgpolicy.v2.ListConstraintsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.orgpolicy.v2.ListConstraintsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.orgpolicy.v2.ListConstraintsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static com.google.cloud.orgpolicy.v2.ListConstraintsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.orgpolicy.v2.ListConstraintsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.orgpolicy.v2.ListConstraintsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.orgpolicy.v2.ListConstraintsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.orgpolicy.v2.ListConstraintsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.orgpolicy.v2.ListConstraintsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.orgpolicy.v2.ListConstraintsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public 
Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The response returned from the [ListConstraints] * [google.cloud.orgpolicy.v2.OrgPolicy.ListConstraints] method. * </pre> * * Protobuf type {@code google.cloud.orgpolicy.v2.ListConstraintsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.orgpolicy.v2.ListConstraintsResponse) com.google.cloud.orgpolicy.v2.ListConstraintsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.orgpolicy.v2.OrgPolicyProto .internal_static_google_cloud_orgpolicy_v2_ListConstraintsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.orgpolicy.v2.OrgPolicyProto .internal_static_google_cloud_orgpolicy_v2_ListConstraintsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.orgpolicy.v2.ListConstraintsResponse.class, com.google.cloud.orgpolicy.v2.ListConstraintsResponse.Builder.class); } // Construct using com.google.cloud.orgpolicy.v2.ListConstraintsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (constraintsBuilder_ == null) { constraints_ = java.util.Collections.emptyList(); } else { constraints_ = null; constraintsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor 
getDescriptorForType() { return com.google.cloud.orgpolicy.v2.OrgPolicyProto .internal_static_google_cloud_orgpolicy_v2_ListConstraintsResponse_descriptor; } @java.lang.Override public com.google.cloud.orgpolicy.v2.ListConstraintsResponse getDefaultInstanceForType() { return com.google.cloud.orgpolicy.v2.ListConstraintsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.orgpolicy.v2.ListConstraintsResponse build() { com.google.cloud.orgpolicy.v2.ListConstraintsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.orgpolicy.v2.ListConstraintsResponse buildPartial() { com.google.cloud.orgpolicy.v2.ListConstraintsResponse result = new com.google.cloud.orgpolicy.v2.ListConstraintsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.orgpolicy.v2.ListConstraintsResponse result) { if (constraintsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { constraints_ = java.util.Collections.unmodifiableList(constraints_); bitField0_ = (bitField0_ & ~0x00000001); } result.constraints_ = constraints_; } else { result.constraints_ = constraintsBuilder_.build(); } } private void buildPartial0(com.google.cloud.orgpolicy.v2.ListConstraintsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder 
clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.orgpolicy.v2.ListConstraintsResponse) { return mergeFrom((com.google.cloud.orgpolicy.v2.ListConstraintsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.orgpolicy.v2.ListConstraintsResponse other) { if (other == com.google.cloud.orgpolicy.v2.ListConstraintsResponse.getDefaultInstance()) return this; if (constraintsBuilder_ == null) { if (!other.constraints_.isEmpty()) { if (constraints_.isEmpty()) { constraints_ = other.constraints_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureConstraintsIsMutable(); constraints_.addAll(other.constraints_); } onChanged(); } } else { if (!other.constraints_.isEmpty()) { if (constraintsBuilder_.isEmpty()) { constraintsBuilder_.dispose(); constraintsBuilder_ = null; constraints_ = other.constraints_; bitField0_ = (bitField0_ & ~0x00000001); constraintsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getConstraintsFieldBuilder() : null; } else { constraintsBuilder_.addAllMessages(other.constraints_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.orgpolicy.v2.Constraint m = input.readMessage( com.google.cloud.orgpolicy.v2.Constraint.parser(), extensionRegistry); if (constraintsBuilder_ == null) { ensureConstraintsIsMutable(); constraints_.add(m); } else { constraintsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.orgpolicy.v2.Constraint> constraints_ = java.util.Collections.emptyList(); private void ensureConstraintsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { constraints_ = new java.util.ArrayList<com.google.cloud.orgpolicy.v2.Constraint>(constraints_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.orgpolicy.v2.Constraint, com.google.cloud.orgpolicy.v2.Constraint.Builder, 
com.google.cloud.orgpolicy.v2.ConstraintOrBuilder> constraintsBuilder_; /** * * * <pre> * The collection of constraints that are available on the targeted resource. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Constraint constraints = 1;</code> */ public java.util.List<com.google.cloud.orgpolicy.v2.Constraint> getConstraintsList() { if (constraintsBuilder_ == null) { return java.util.Collections.unmodifiableList(constraints_); } else { return constraintsBuilder_.getMessageList(); } } /** * * * <pre> * The collection of constraints that are available on the targeted resource. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Constraint constraints = 1;</code> */ public int getConstraintsCount() { if (constraintsBuilder_ == null) { return constraints_.size(); } else { return constraintsBuilder_.getCount(); } } /** * * * <pre> * The collection of constraints that are available on the targeted resource. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Constraint constraints = 1;</code> */ public com.google.cloud.orgpolicy.v2.Constraint getConstraints(int index) { if (constraintsBuilder_ == null) { return constraints_.get(index); } else { return constraintsBuilder_.getMessage(index); } } /** * * * <pre> * The collection of constraints that are available on the targeted resource. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Constraint constraints = 1;</code> */ public Builder setConstraints(int index, com.google.cloud.orgpolicy.v2.Constraint value) { if (constraintsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureConstraintsIsMutable(); constraints_.set(index, value); onChanged(); } else { constraintsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The collection of constraints that are available on the targeted resource. 
* </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Constraint constraints = 1;</code> */ public Builder setConstraints( int index, com.google.cloud.orgpolicy.v2.Constraint.Builder builderForValue) { if (constraintsBuilder_ == null) { ensureConstraintsIsMutable(); constraints_.set(index, builderForValue.build()); onChanged(); } else { constraintsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The collection of constraints that are available on the targeted resource. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Constraint constraints = 1;</code> */ public Builder addConstraints(com.google.cloud.orgpolicy.v2.Constraint value) { if (constraintsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureConstraintsIsMutable(); constraints_.add(value); onChanged(); } else { constraintsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The collection of constraints that are available on the targeted resource. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Constraint constraints = 1;</code> */ public Builder addConstraints(int index, com.google.cloud.orgpolicy.v2.Constraint value) { if (constraintsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureConstraintsIsMutable(); constraints_.add(index, value); onChanged(); } else { constraintsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The collection of constraints that are available on the targeted resource. 
* </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Constraint constraints = 1;</code> */ public Builder addConstraints( com.google.cloud.orgpolicy.v2.Constraint.Builder builderForValue) { if (constraintsBuilder_ == null) { ensureConstraintsIsMutable(); constraints_.add(builderForValue.build()); onChanged(); } else { constraintsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The collection of constraints that are available on the targeted resource. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Constraint constraints = 1;</code> */ public Builder addConstraints( int index, com.google.cloud.orgpolicy.v2.Constraint.Builder builderForValue) { if (constraintsBuilder_ == null) { ensureConstraintsIsMutable(); constraints_.add(index, builderForValue.build()); onChanged(); } else { constraintsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The collection of constraints that are available on the targeted resource. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Constraint constraints = 1;</code> */ public Builder addAllConstraints( java.lang.Iterable<? extends com.google.cloud.orgpolicy.v2.Constraint> values) { if (constraintsBuilder_ == null) { ensureConstraintsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, constraints_); onChanged(); } else { constraintsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The collection of constraints that are available on the targeted resource. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Constraint constraints = 1;</code> */ public Builder clearConstraints() { if (constraintsBuilder_ == null) { constraints_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { constraintsBuilder_.clear(); } return this; } /** * * * <pre> * The collection of constraints that are available on the targeted resource. 
* </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Constraint constraints = 1;</code> */ public Builder removeConstraints(int index) { if (constraintsBuilder_ == null) { ensureConstraintsIsMutable(); constraints_.remove(index); onChanged(); } else { constraintsBuilder_.remove(index); } return this; } /** * * * <pre> * The collection of constraints that are available on the targeted resource. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Constraint constraints = 1;</code> */ public com.google.cloud.orgpolicy.v2.Constraint.Builder getConstraintsBuilder(int index) { return getConstraintsFieldBuilder().getBuilder(index); } /** * * * <pre> * The collection of constraints that are available on the targeted resource. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Constraint constraints = 1;</code> */ public com.google.cloud.orgpolicy.v2.ConstraintOrBuilder getConstraintsOrBuilder(int index) { if (constraintsBuilder_ == null) { return constraints_.get(index); } else { return constraintsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The collection of constraints that are available on the targeted resource. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Constraint constraints = 1;</code> */ public java.util.List<? extends com.google.cloud.orgpolicy.v2.ConstraintOrBuilder> getConstraintsOrBuilderList() { if (constraintsBuilder_ != null) { return constraintsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(constraints_); } } /** * * * <pre> * The collection of constraints that are available on the targeted resource. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Constraint constraints = 1;</code> */ public com.google.cloud.orgpolicy.v2.Constraint.Builder addConstraintsBuilder() { return getConstraintsFieldBuilder() .addBuilder(com.google.cloud.orgpolicy.v2.Constraint.getDefaultInstance()); } /** * * * <pre> * The collection of constraints that are available on the targeted resource. 
* </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Constraint constraints = 1;</code> */ public com.google.cloud.orgpolicy.v2.Constraint.Builder addConstraintsBuilder(int index) { return getConstraintsFieldBuilder() .addBuilder(index, com.google.cloud.orgpolicy.v2.Constraint.getDefaultInstance()); } /** * * * <pre> * The collection of constraints that are available on the targeted resource. * </pre> * * <code>repeated .google.cloud.orgpolicy.v2.Constraint constraints = 1;</code> */ public java.util.List<com.google.cloud.orgpolicy.v2.Constraint.Builder> getConstraintsBuilderList() { return getConstraintsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.orgpolicy.v2.Constraint, com.google.cloud.orgpolicy.v2.Constraint.Builder, com.google.cloud.orgpolicy.v2.ConstraintOrBuilder> getConstraintsFieldBuilder() { if (constraintsBuilder_ == null) { constraintsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.orgpolicy.v2.Constraint, com.google.cloud.orgpolicy.v2.Constraint.Builder, com.google.cloud.orgpolicy.v2.ConstraintOrBuilder>( constraints_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); constraints_ = null; } return constraintsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Page token used to retrieve the next page. This is currently not used. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Page token used to retrieve the next page. This is currently not used. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Page token used to retrieve the next page. This is currently not used. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Page token used to retrieve the next page. This is currently not used. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Page token used to retrieve the next page. This is currently not used. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.orgpolicy.v2.ListConstraintsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.orgpolicy.v2.ListConstraintsResponse) private static final com.google.cloud.orgpolicy.v2.ListConstraintsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.orgpolicy.v2.ListConstraintsResponse(); } public static com.google.cloud.orgpolicy.v2.ListConstraintsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListConstraintsResponse> PARSER = new com.google.protobuf.AbstractParser<ListConstraintsResponse>() { @java.lang.Override public ListConstraintsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public 
static com.google.protobuf.Parser<ListConstraintsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListConstraintsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.orgpolicy.v2.ListConstraintsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,057
java-document-ai/proto-google-cloud-document-ai-v1beta3/src/main/java/com/google/cloud/documentai/v1beta3/Barcode.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/documentai/v1beta3/barcode.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.documentai.v1beta3; /** * * * <pre> * Encodes the detailed information of a barcode. * </pre> * * Protobuf type {@code google.cloud.documentai.v1beta3.Barcode} */ public final class Barcode extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.documentai.v1beta3.Barcode) BarcodeOrBuilder { private static final long serialVersionUID = 0L; // Use Barcode.newBuilder() to construct. 
private Barcode(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private Barcode() { format_ = ""; valueFormat_ = ""; rawValue_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new Barcode(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.documentai.v1beta3.BarcodeProto .internal_static_google_cloud_documentai_v1beta3_Barcode_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.documentai.v1beta3.BarcodeProto .internal_static_google_cloud_documentai_v1beta3_Barcode_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.documentai.v1beta3.Barcode.class, com.google.cloud.documentai.v1beta3.Barcode.Builder.class); } public static final int FORMAT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object format_ = ""; /** * * * <pre> * Format of a barcode. * The supported formats are: * * - `CODE_128`: Code 128 type. * - `CODE_39`: Code 39 type. * - `CODE_93`: Code 93 type. * - `CODABAR`: Codabar type. * - `DATA_MATRIX`: 2D Data Matrix type. * - `ITF`: ITF type. * - `EAN_13`: EAN-13 type. * - `EAN_8`: EAN-8 type. * - `QR_CODE`: 2D QR code type. * - `UPC_A`: UPC-A type. * - `UPC_E`: UPC-E type. * - `PDF417`: PDF417 type. * - `AZTEC`: 2D Aztec code type. * - `DATABAR`: GS1 DataBar code type. * </pre> * * <code>string format = 1;</code> * * @return The format. */ @java.lang.Override public java.lang.String getFormat() { java.lang.Object ref = format_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); format_ = s; return s; } } /** * * * <pre> * Format of a barcode. 
* The supported formats are: * * - `CODE_128`: Code 128 type. * - `CODE_39`: Code 39 type. * - `CODE_93`: Code 93 type. * - `CODABAR`: Codabar type. * - `DATA_MATRIX`: 2D Data Matrix type. * - `ITF`: ITF type. * - `EAN_13`: EAN-13 type. * - `EAN_8`: EAN-8 type. * - `QR_CODE`: 2D QR code type. * - `UPC_A`: UPC-A type. * - `UPC_E`: UPC-E type. * - `PDF417`: PDF417 type. * - `AZTEC`: 2D Aztec code type. * - `DATABAR`: GS1 DataBar code type. * </pre> * * <code>string format = 1;</code> * * @return The bytes for format. */ @java.lang.Override public com.google.protobuf.ByteString getFormatBytes() { java.lang.Object ref = format_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); format_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int VALUE_FORMAT_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object valueFormat_ = ""; /** * * * <pre> * Value format describes the format of the value that a barcode * encodes. * The supported formats are: * * - `CONTACT_INFO`: Contact information. * - `EMAIL`: Email address. * - `ISBN`: ISBN identifier. * - `PHONE`: Phone number. * - `PRODUCT`: Product. * - `SMS`: SMS message. * - `TEXT`: Text string. * - `URL`: URL address. * - `WIFI`: Wifi information. * - `GEO`: Geo-localization. * - `CALENDAR_EVENT`: Calendar event. * - `DRIVER_LICENSE`: Driver's license. * </pre> * * <code>string value_format = 2;</code> * * @return The valueFormat. */ @java.lang.Override public java.lang.String getValueFormat() { java.lang.Object ref = valueFormat_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); valueFormat_ = s; return s; } } /** * * * <pre> * Value format describes the format of the value that a barcode * encodes. 
* The supported formats are: * * - `CONTACT_INFO`: Contact information. * - `EMAIL`: Email address. * - `ISBN`: ISBN identifier. * - `PHONE`: Phone number. * - `PRODUCT`: Product. * - `SMS`: SMS message. * - `TEXT`: Text string. * - `URL`: URL address. * - `WIFI`: Wifi information. * - `GEO`: Geo-localization. * - `CALENDAR_EVENT`: Calendar event. * - `DRIVER_LICENSE`: Driver's license. * </pre> * * <code>string value_format = 2;</code> * * @return The bytes for valueFormat. */ @java.lang.Override public com.google.protobuf.ByteString getValueFormatBytes() { java.lang.Object ref = valueFormat_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); valueFormat_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int RAW_VALUE_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object rawValue_ = ""; /** * * * <pre> * Raw value encoded in the barcode. * For example: `'MEBKM:TITLE:Google;URL:https://www.google.com;;'`. * </pre> * * <code>string raw_value = 3;</code> * * @return The rawValue. */ @java.lang.Override public java.lang.String getRawValue() { java.lang.Object ref = rawValue_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); rawValue_ = s; return s; } } /** * * * <pre> * Raw value encoded in the barcode. * For example: `'MEBKM:TITLE:Google;URL:https://www.google.com;;'`. * </pre> * * <code>string raw_value = 3;</code> * * @return The bytes for rawValue. 
*/ @java.lang.Override public com.google.protobuf.ByteString getRawValueBytes() { java.lang.Object ref = rawValue_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); rawValue_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(format_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, format_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(valueFormat_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, valueFormat_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(rawValue_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, rawValue_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(format_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, format_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(valueFormat_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, valueFormat_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(rawValue_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, rawValue_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof 
com.google.cloud.documentai.v1beta3.Barcode)) { return super.equals(obj); } com.google.cloud.documentai.v1beta3.Barcode other = (com.google.cloud.documentai.v1beta3.Barcode) obj; if (!getFormat().equals(other.getFormat())) return false; if (!getValueFormat().equals(other.getValueFormat())) return false; if (!getRawValue().equals(other.getRawValue())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + FORMAT_FIELD_NUMBER; hash = (53 * hash) + getFormat().hashCode(); hash = (37 * hash) + VALUE_FORMAT_FIELD_NUMBER; hash = (53 * hash) + getValueFormat().hashCode(); hash = (37 * hash) + RAW_VALUE_FIELD_NUMBER; hash = (53 * hash) + getRawValue().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.documentai.v1beta3.Barcode parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.documentai.v1beta3.Barcode parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.documentai.v1beta3.Barcode parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.documentai.v1beta3.Barcode parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.documentai.v1beta3.Barcode parseFrom(byte[] 
data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.documentai.v1beta3.Barcode parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.documentai.v1beta3.Barcode parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.documentai.v1beta3.Barcode parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.documentai.v1beta3.Barcode parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.documentai.v1beta3.Barcode parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.documentai.v1beta3.Barcode parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.documentai.v1beta3.Barcode parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return 
newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.documentai.v1beta3.Barcode prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Encodes the detailed information of a barcode. * </pre> * * Protobuf type {@code google.cloud.documentai.v1beta3.Barcode} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.documentai.v1beta3.Barcode) com.google.cloud.documentai.v1beta3.BarcodeOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.documentai.v1beta3.BarcodeProto .internal_static_google_cloud_documentai_v1beta3_Barcode_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.documentai.v1beta3.BarcodeProto .internal_static_google_cloud_documentai_v1beta3_Barcode_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.documentai.v1beta3.Barcode.class, com.google.cloud.documentai.v1beta3.Barcode.Builder.class); } // Construct using com.google.cloud.documentai.v1beta3.Barcode.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; format_ = ""; valueFormat_ = ""; rawValue_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
com.google.cloud.documentai.v1beta3.BarcodeProto .internal_static_google_cloud_documentai_v1beta3_Barcode_descriptor; } @java.lang.Override public com.google.cloud.documentai.v1beta3.Barcode getDefaultInstanceForType() { return com.google.cloud.documentai.v1beta3.Barcode.getDefaultInstance(); } @java.lang.Override public com.google.cloud.documentai.v1beta3.Barcode build() { com.google.cloud.documentai.v1beta3.Barcode result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.documentai.v1beta3.Barcode buildPartial() { com.google.cloud.documentai.v1beta3.Barcode result = new com.google.cloud.documentai.v1beta3.Barcode(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.documentai.v1beta3.Barcode result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.format_ = format_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.valueFormat_ = valueFormat_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.rawValue_ = rawValue_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object 
value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.documentai.v1beta3.Barcode) { return mergeFrom((com.google.cloud.documentai.v1beta3.Barcode) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.documentai.v1beta3.Barcode other) { if (other == com.google.cloud.documentai.v1beta3.Barcode.getDefaultInstance()) return this; if (!other.getFormat().isEmpty()) { format_ = other.format_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getValueFormat().isEmpty()) { valueFormat_ = other.valueFormat_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getRawValue().isEmpty()) { rawValue_ = other.rawValue_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { format_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { valueFormat_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { rawValue_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private 
java.lang.Object format_ = ""; /** * * * <pre> * Format of a barcode. * The supported formats are: * * - `CODE_128`: Code 128 type. * - `CODE_39`: Code 39 type. * - `CODE_93`: Code 93 type. * - `CODABAR`: Codabar type. * - `DATA_MATRIX`: 2D Data Matrix type. * - `ITF`: ITF type. * - `EAN_13`: EAN-13 type. * - `EAN_8`: EAN-8 type. * - `QR_CODE`: 2D QR code type. * - `UPC_A`: UPC-A type. * - `UPC_E`: UPC-E type. * - `PDF417`: PDF417 type. * - `AZTEC`: 2D Aztec code type. * - `DATABAR`: GS1 DataBar code type. * </pre> * * <code>string format = 1;</code> * * @return The format. */ public java.lang.String getFormat() { java.lang.Object ref = format_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); format_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Format of a barcode. * The supported formats are: * * - `CODE_128`: Code 128 type. * - `CODE_39`: Code 39 type. * - `CODE_93`: Code 93 type. * - `CODABAR`: Codabar type. * - `DATA_MATRIX`: 2D Data Matrix type. * - `ITF`: ITF type. * - `EAN_13`: EAN-13 type. * - `EAN_8`: EAN-8 type. * - `QR_CODE`: 2D QR code type. * - `UPC_A`: UPC-A type. * - `UPC_E`: UPC-E type. * - `PDF417`: PDF417 type. * - `AZTEC`: 2D Aztec code type. * - `DATABAR`: GS1 DataBar code type. * </pre> * * <code>string format = 1;</code> * * @return The bytes for format. */ public com.google.protobuf.ByteString getFormatBytes() { java.lang.Object ref = format_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); format_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Format of a barcode. * The supported formats are: * * - `CODE_128`: Code 128 type. * - `CODE_39`: Code 39 type. * - `CODE_93`: Code 93 type. * - `CODABAR`: Codabar type. * - `DATA_MATRIX`: 2D Data Matrix type. * - `ITF`: ITF type. 
* - `EAN_13`: EAN-13 type. * - `EAN_8`: EAN-8 type. * - `QR_CODE`: 2D QR code type. * - `UPC_A`: UPC-A type. * - `UPC_E`: UPC-E type. * - `PDF417`: PDF417 type. * - `AZTEC`: 2D Aztec code type. * - `DATABAR`: GS1 DataBar code type. * </pre> * * <code>string format = 1;</code> * * @param value The format to set. * @return This builder for chaining. */ public Builder setFormat(java.lang.String value) { if (value == null) { throw new NullPointerException(); } format_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Format of a barcode. * The supported formats are: * * - `CODE_128`: Code 128 type. * - `CODE_39`: Code 39 type. * - `CODE_93`: Code 93 type. * - `CODABAR`: Codabar type. * - `DATA_MATRIX`: 2D Data Matrix type. * - `ITF`: ITF type. * - `EAN_13`: EAN-13 type. * - `EAN_8`: EAN-8 type. * - `QR_CODE`: 2D QR code type. * - `UPC_A`: UPC-A type. * - `UPC_E`: UPC-E type. * - `PDF417`: PDF417 type. * - `AZTEC`: 2D Aztec code type. * - `DATABAR`: GS1 DataBar code type. * </pre> * * <code>string format = 1;</code> * * @return This builder for chaining. */ public Builder clearFormat() { format_ = getDefaultInstance().getFormat(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Format of a barcode. * The supported formats are: * * - `CODE_128`: Code 128 type. * - `CODE_39`: Code 39 type. * - `CODE_93`: Code 93 type. * - `CODABAR`: Codabar type. * - `DATA_MATRIX`: 2D Data Matrix type. * - `ITF`: ITF type. * - `EAN_13`: EAN-13 type. * - `EAN_8`: EAN-8 type. * - `QR_CODE`: 2D QR code type. * - `UPC_A`: UPC-A type. * - `UPC_E`: UPC-E type. * - `PDF417`: PDF417 type. * - `AZTEC`: 2D Aztec code type. * - `DATABAR`: GS1 DataBar code type. * </pre> * * <code>string format = 1;</code> * * @param value The bytes for format to set. * @return This builder for chaining. 
*/ public Builder setFormatBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); format_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object valueFormat_ = ""; /** * * * <pre> * Value format describes the format of the value that a barcode * encodes. * The supported formats are: * * - `CONTACT_INFO`: Contact information. * - `EMAIL`: Email address. * - `ISBN`: ISBN identifier. * - `PHONE`: Phone number. * - `PRODUCT`: Product. * - `SMS`: SMS message. * - `TEXT`: Text string. * - `URL`: URL address. * - `WIFI`: Wifi information. * - `GEO`: Geo-localization. * - `CALENDAR_EVENT`: Calendar event. * - `DRIVER_LICENSE`: Driver's license. * </pre> * * <code>string value_format = 2;</code> * * @return The valueFormat. */ public java.lang.String getValueFormat() { java.lang.Object ref = valueFormat_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); valueFormat_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Value format describes the format of the value that a barcode * encodes. * The supported formats are: * * - `CONTACT_INFO`: Contact information. * - `EMAIL`: Email address. * - `ISBN`: ISBN identifier. * - `PHONE`: Phone number. * - `PRODUCT`: Product. * - `SMS`: SMS message. * - `TEXT`: Text string. * - `URL`: URL address. * - `WIFI`: Wifi information. * - `GEO`: Geo-localization. * - `CALENDAR_EVENT`: Calendar event. * - `DRIVER_LICENSE`: Driver's license. * </pre> * * <code>string value_format = 2;</code> * * @return The bytes for valueFormat. 
*/ public com.google.protobuf.ByteString getValueFormatBytes() { java.lang.Object ref = valueFormat_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); valueFormat_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Value format describes the format of the value that a barcode * encodes. * The supported formats are: * * - `CONTACT_INFO`: Contact information. * - `EMAIL`: Email address. * - `ISBN`: ISBN identifier. * - `PHONE`: Phone number. * - `PRODUCT`: Product. * - `SMS`: SMS message. * - `TEXT`: Text string. * - `URL`: URL address. * - `WIFI`: Wifi information. * - `GEO`: Geo-localization. * - `CALENDAR_EVENT`: Calendar event. * - `DRIVER_LICENSE`: Driver's license. * </pre> * * <code>string value_format = 2;</code> * * @param value The valueFormat to set. * @return This builder for chaining. */ public Builder setValueFormat(java.lang.String value) { if (value == null) { throw new NullPointerException(); } valueFormat_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Value format describes the format of the value that a barcode * encodes. * The supported formats are: * * - `CONTACT_INFO`: Contact information. * - `EMAIL`: Email address. * - `ISBN`: ISBN identifier. * - `PHONE`: Phone number. * - `PRODUCT`: Product. * - `SMS`: SMS message. * - `TEXT`: Text string. * - `URL`: URL address. * - `WIFI`: Wifi information. * - `GEO`: Geo-localization. * - `CALENDAR_EVENT`: Calendar event. * - `DRIVER_LICENSE`: Driver's license. * </pre> * * <code>string value_format = 2;</code> * * @return This builder for chaining. */ public Builder clearValueFormat() { valueFormat_ = getDefaultInstance().getValueFormat(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Value format describes the format of the value that a barcode * encodes. 
* The supported formats are: * * - `CONTACT_INFO`: Contact information. * - `EMAIL`: Email address. * - `ISBN`: ISBN identifier. * - `PHONE`: Phone number. * - `PRODUCT`: Product. * - `SMS`: SMS message. * - `TEXT`: Text string. * - `URL`: URL address. * - `WIFI`: Wifi information. * - `GEO`: Geo-localization. * - `CALENDAR_EVENT`: Calendar event. * - `DRIVER_LICENSE`: Driver's license. * </pre> * * <code>string value_format = 2;</code> * * @param value The bytes for valueFormat to set. * @return This builder for chaining. */ public Builder setValueFormatBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); valueFormat_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object rawValue_ = ""; /** * * * <pre> * Raw value encoded in the barcode. * For example: `'MEBKM:TITLE:Google;URL:https://www.google.com;;'`. * </pre> * * <code>string raw_value = 3;</code> * * @return The rawValue. */ public java.lang.String getRawValue() { java.lang.Object ref = rawValue_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); rawValue_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Raw value encoded in the barcode. * For example: `'MEBKM:TITLE:Google;URL:https://www.google.com;;'`. * </pre> * * <code>string raw_value = 3;</code> * * @return The bytes for rawValue. */ public com.google.protobuf.ByteString getRawValueBytes() { java.lang.Object ref = rawValue_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); rawValue_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Raw value encoded in the barcode. * For example: `'MEBKM:TITLE:Google;URL:https://www.google.com;;'`. 
* </pre> * * <code>string raw_value = 3;</code> * * @param value The rawValue to set. * @return This builder for chaining. */ public Builder setRawValue(java.lang.String value) { if (value == null) { throw new NullPointerException(); } rawValue_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Raw value encoded in the barcode. * For example: `'MEBKM:TITLE:Google;URL:https://www.google.com;;'`. * </pre> * * <code>string raw_value = 3;</code> * * @return This builder for chaining. */ public Builder clearRawValue() { rawValue_ = getDefaultInstance().getRawValue(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Raw value encoded in the barcode. * For example: `'MEBKM:TITLE:Google;URL:https://www.google.com;;'`. * </pre> * * <code>string raw_value = 3;</code> * * @param value The bytes for rawValue to set. * @return This builder for chaining. */ public Builder setRawValueBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); rawValue_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.documentai.v1beta3.Barcode) } // @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta3.Barcode) private static final com.google.cloud.documentai.v1beta3.Barcode DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.documentai.v1beta3.Barcode(); } public static com.google.cloud.documentai.v1beta3.Barcode getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<Barcode> PARSER = new 
com.google.protobuf.AbstractParser<Barcode>() { @java.lang.Override public Barcode parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<Barcode> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<Barcode> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.documentai.v1beta3.Barcode getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,154
java-dlp/proto-google-cloud-dlp-v2/src/main/java/com/google/privacy/dlp/v2/ListColumnDataProfilesResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/privacy/dlp/v2/dlp.proto // Protobuf Java Version: 3.25.8 package com.google.privacy.dlp.v2; /** * * * <pre> * List of profiles generated for a given organization or project. * </pre> * * Protobuf type {@code google.privacy.dlp.v2.ListColumnDataProfilesResponse} */ public final class ListColumnDataProfilesResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.privacy.dlp.v2.ListColumnDataProfilesResponse) ListColumnDataProfilesResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListColumnDataProfilesResponse.newBuilder() to construct. 
private ListColumnDataProfilesResponse( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListColumnDataProfilesResponse() { columnDataProfiles_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListColumnDataProfilesResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_ListColumnDataProfilesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_ListColumnDataProfilesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.privacy.dlp.v2.ListColumnDataProfilesResponse.class, com.google.privacy.dlp.v2.ListColumnDataProfilesResponse.Builder.class); } public static final int COLUMN_DATA_PROFILES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.privacy.dlp.v2.ColumnDataProfile> columnDataProfiles_; /** * * * <pre> * List of data profiles. * </pre> * * <code>repeated .google.privacy.dlp.v2.ColumnDataProfile column_data_profiles = 1;</code> */ @java.lang.Override public java.util.List<com.google.privacy.dlp.v2.ColumnDataProfile> getColumnDataProfilesList() { return columnDataProfiles_; } /** * * * <pre> * List of data profiles. * </pre> * * <code>repeated .google.privacy.dlp.v2.ColumnDataProfile column_data_profiles = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.privacy.dlp.v2.ColumnDataProfileOrBuilder> getColumnDataProfilesOrBuilderList() { return columnDataProfiles_; } /** * * * <pre> * List of data profiles. 
* </pre> * * <code>repeated .google.privacy.dlp.v2.ColumnDataProfile column_data_profiles = 1;</code> */ @java.lang.Override public int getColumnDataProfilesCount() { return columnDataProfiles_.size(); } /** * * * <pre> * List of data profiles. * </pre> * * <code>repeated .google.privacy.dlp.v2.ColumnDataProfile column_data_profiles = 1;</code> */ @java.lang.Override public com.google.privacy.dlp.v2.ColumnDataProfile getColumnDataProfiles(int index) { return columnDataProfiles_.get(index); } /** * * * <pre> * List of data profiles. * </pre> * * <code>repeated .google.privacy.dlp.v2.ColumnDataProfile column_data_profiles = 1;</code> */ @java.lang.Override public com.google.privacy.dlp.v2.ColumnDataProfileOrBuilder getColumnDataProfilesOrBuilder( int index) { return columnDataProfiles_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * The next page token. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * The next page token. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < columnDataProfiles_.size(); i++) { output.writeMessage(1, columnDataProfiles_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < columnDataProfiles_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, columnDataProfiles_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.privacy.dlp.v2.ListColumnDataProfilesResponse)) { return super.equals(obj); } com.google.privacy.dlp.v2.ListColumnDataProfilesResponse other = (com.google.privacy.dlp.v2.ListColumnDataProfilesResponse) obj; if (!getColumnDataProfilesList().equals(other.getColumnDataProfilesList())) return false; if 
(!getNextPageToken().equals(other.getNextPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getColumnDataProfilesCount() > 0) { hash = (37 * hash) + COLUMN_DATA_PROFILES_FIELD_NUMBER; hash = (53 * hash) + getColumnDataProfilesList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.privacy.dlp.v2.ListColumnDataProfilesResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.privacy.dlp.v2.ListColumnDataProfilesResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.privacy.dlp.v2.ListColumnDataProfilesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.privacy.dlp.v2.ListColumnDataProfilesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.privacy.dlp.v2.ListColumnDataProfilesResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.privacy.dlp.v2.ListColumnDataProfilesResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.privacy.dlp.v2.ListColumnDataProfilesResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.privacy.dlp.v2.ListColumnDataProfilesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.privacy.dlp.v2.ListColumnDataProfilesResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.privacy.dlp.v2.ListColumnDataProfilesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.privacy.dlp.v2.ListColumnDataProfilesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.privacy.dlp.v2.ListColumnDataProfilesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( 
com.google.privacy.dlp.v2.ListColumnDataProfilesResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * List of profiles generated for a given organization or project. * </pre> * * Protobuf type {@code google.privacy.dlp.v2.ListColumnDataProfilesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.privacy.dlp.v2.ListColumnDataProfilesResponse) com.google.privacy.dlp.v2.ListColumnDataProfilesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_ListColumnDataProfilesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_ListColumnDataProfilesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.privacy.dlp.v2.ListColumnDataProfilesResponse.class, com.google.privacy.dlp.v2.ListColumnDataProfilesResponse.Builder.class); } // Construct using com.google.privacy.dlp.v2.ListColumnDataProfilesResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (columnDataProfilesBuilder_ == null) { columnDataProfiles_ = java.util.Collections.emptyList(); } else { columnDataProfiles_ = null; columnDataProfilesBuilder_.clear(); } bitField0_ = 
(bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_ListColumnDataProfilesResponse_descriptor; } @java.lang.Override public com.google.privacy.dlp.v2.ListColumnDataProfilesResponse getDefaultInstanceForType() { return com.google.privacy.dlp.v2.ListColumnDataProfilesResponse.getDefaultInstance(); } @java.lang.Override public com.google.privacy.dlp.v2.ListColumnDataProfilesResponse build() { com.google.privacy.dlp.v2.ListColumnDataProfilesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.privacy.dlp.v2.ListColumnDataProfilesResponse buildPartial() { com.google.privacy.dlp.v2.ListColumnDataProfilesResponse result = new com.google.privacy.dlp.v2.ListColumnDataProfilesResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.privacy.dlp.v2.ListColumnDataProfilesResponse result) { if (columnDataProfilesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { columnDataProfiles_ = java.util.Collections.unmodifiableList(columnDataProfiles_); bitField0_ = (bitField0_ & ~0x00000001); } result.columnDataProfiles_ = columnDataProfiles_; } else { result.columnDataProfiles_ = columnDataProfilesBuilder_.build(); } } private void buildPartial0(com.google.privacy.dlp.v2.ListColumnDataProfilesResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, 
value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.privacy.dlp.v2.ListColumnDataProfilesResponse) { return mergeFrom((com.google.privacy.dlp.v2.ListColumnDataProfilesResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.privacy.dlp.v2.ListColumnDataProfilesResponse other) { if (other == com.google.privacy.dlp.v2.ListColumnDataProfilesResponse.getDefaultInstance()) return this; if (columnDataProfilesBuilder_ == null) { if (!other.columnDataProfiles_.isEmpty()) { if (columnDataProfiles_.isEmpty()) { columnDataProfiles_ = other.columnDataProfiles_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureColumnDataProfilesIsMutable(); columnDataProfiles_.addAll(other.columnDataProfiles_); } onChanged(); } } else { if (!other.columnDataProfiles_.isEmpty()) { if (columnDataProfilesBuilder_.isEmpty()) { columnDataProfilesBuilder_.dispose(); columnDataProfilesBuilder_ = null; columnDataProfiles_ = other.columnDataProfiles_; bitField0_ = (bitField0_ & ~0x00000001); columnDataProfilesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getColumnDataProfilesFieldBuilder() : null; } else { columnDataProfilesBuilder_.addAllMessages(other.columnDataProfiles_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.privacy.dlp.v2.ColumnDataProfile m = input.readMessage( com.google.privacy.dlp.v2.ColumnDataProfile.parser(), extensionRegistry); if (columnDataProfilesBuilder_ == null) { ensureColumnDataProfilesIsMutable(); columnDataProfiles_.add(m); } else { columnDataProfilesBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.privacy.dlp.v2.ColumnDataProfile> columnDataProfiles_ = java.util.Collections.emptyList(); private void ensureColumnDataProfilesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { columnDataProfiles_ = new java.util.ArrayList<com.google.privacy.dlp.v2.ColumnDataProfile>( columnDataProfiles_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< 
com.google.privacy.dlp.v2.ColumnDataProfile, com.google.privacy.dlp.v2.ColumnDataProfile.Builder, com.google.privacy.dlp.v2.ColumnDataProfileOrBuilder> columnDataProfilesBuilder_; /** * * * <pre> * List of data profiles. * </pre> * * <code>repeated .google.privacy.dlp.v2.ColumnDataProfile column_data_profiles = 1;</code> */ public java.util.List<com.google.privacy.dlp.v2.ColumnDataProfile> getColumnDataProfilesList() { if (columnDataProfilesBuilder_ == null) { return java.util.Collections.unmodifiableList(columnDataProfiles_); } else { return columnDataProfilesBuilder_.getMessageList(); } } /** * * * <pre> * List of data profiles. * </pre> * * <code>repeated .google.privacy.dlp.v2.ColumnDataProfile column_data_profiles = 1;</code> */ public int getColumnDataProfilesCount() { if (columnDataProfilesBuilder_ == null) { return columnDataProfiles_.size(); } else { return columnDataProfilesBuilder_.getCount(); } } /** * * * <pre> * List of data profiles. * </pre> * * <code>repeated .google.privacy.dlp.v2.ColumnDataProfile column_data_profiles = 1;</code> */ public com.google.privacy.dlp.v2.ColumnDataProfile getColumnDataProfiles(int index) { if (columnDataProfilesBuilder_ == null) { return columnDataProfiles_.get(index); } else { return columnDataProfilesBuilder_.getMessage(index); } } /** * * * <pre> * List of data profiles. * </pre> * * <code>repeated .google.privacy.dlp.v2.ColumnDataProfile column_data_profiles = 1;</code> */ public Builder setColumnDataProfiles( int index, com.google.privacy.dlp.v2.ColumnDataProfile value) { if (columnDataProfilesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureColumnDataProfilesIsMutable(); columnDataProfiles_.set(index, value); onChanged(); } else { columnDataProfilesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * List of data profiles. 
* </pre> * * <code>repeated .google.privacy.dlp.v2.ColumnDataProfile column_data_profiles = 1;</code> */ public Builder setColumnDataProfiles( int index, com.google.privacy.dlp.v2.ColumnDataProfile.Builder builderForValue) { if (columnDataProfilesBuilder_ == null) { ensureColumnDataProfilesIsMutable(); columnDataProfiles_.set(index, builderForValue.build()); onChanged(); } else { columnDataProfilesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * List of data profiles. * </pre> * * <code>repeated .google.privacy.dlp.v2.ColumnDataProfile column_data_profiles = 1;</code> */ public Builder addColumnDataProfiles(com.google.privacy.dlp.v2.ColumnDataProfile value) { if (columnDataProfilesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureColumnDataProfilesIsMutable(); columnDataProfiles_.add(value); onChanged(); } else { columnDataProfilesBuilder_.addMessage(value); } return this; } /** * * * <pre> * List of data profiles. * </pre> * * <code>repeated .google.privacy.dlp.v2.ColumnDataProfile column_data_profiles = 1;</code> */ public Builder addColumnDataProfiles( int index, com.google.privacy.dlp.v2.ColumnDataProfile value) { if (columnDataProfilesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureColumnDataProfilesIsMutable(); columnDataProfiles_.add(index, value); onChanged(); } else { columnDataProfilesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * List of data profiles. 
* </pre> * * <code>repeated .google.privacy.dlp.v2.ColumnDataProfile column_data_profiles = 1;</code> */ public Builder addColumnDataProfiles( com.google.privacy.dlp.v2.ColumnDataProfile.Builder builderForValue) { if (columnDataProfilesBuilder_ == null) { ensureColumnDataProfilesIsMutable(); columnDataProfiles_.add(builderForValue.build()); onChanged(); } else { columnDataProfilesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * List of data profiles. * </pre> * * <code>repeated .google.privacy.dlp.v2.ColumnDataProfile column_data_profiles = 1;</code> */ public Builder addColumnDataProfiles( int index, com.google.privacy.dlp.v2.ColumnDataProfile.Builder builderForValue) { if (columnDataProfilesBuilder_ == null) { ensureColumnDataProfilesIsMutable(); columnDataProfiles_.add(index, builderForValue.build()); onChanged(); } else { columnDataProfilesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * List of data profiles. * </pre> * * <code>repeated .google.privacy.dlp.v2.ColumnDataProfile column_data_profiles = 1;</code> */ public Builder addAllColumnDataProfiles( java.lang.Iterable<? extends com.google.privacy.dlp.v2.ColumnDataProfile> values) { if (columnDataProfilesBuilder_ == null) { ensureColumnDataProfilesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, columnDataProfiles_); onChanged(); } else { columnDataProfilesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * List of data profiles. * </pre> * * <code>repeated .google.privacy.dlp.v2.ColumnDataProfile column_data_profiles = 1;</code> */ public Builder clearColumnDataProfiles() { if (columnDataProfilesBuilder_ == null) { columnDataProfiles_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { columnDataProfilesBuilder_.clear(); } return this; } /** * * * <pre> * List of data profiles. 
* </pre> * * <code>repeated .google.privacy.dlp.v2.ColumnDataProfile column_data_profiles = 1;</code> */ public Builder removeColumnDataProfiles(int index) { if (columnDataProfilesBuilder_ == null) { ensureColumnDataProfilesIsMutable(); columnDataProfiles_.remove(index); onChanged(); } else { columnDataProfilesBuilder_.remove(index); } return this; } /** * * * <pre> * List of data profiles. * </pre> * * <code>repeated .google.privacy.dlp.v2.ColumnDataProfile column_data_profiles = 1;</code> */ public com.google.privacy.dlp.v2.ColumnDataProfile.Builder getColumnDataProfilesBuilder( int index) { return getColumnDataProfilesFieldBuilder().getBuilder(index); } /** * * * <pre> * List of data profiles. * </pre> * * <code>repeated .google.privacy.dlp.v2.ColumnDataProfile column_data_profiles = 1;</code> */ public com.google.privacy.dlp.v2.ColumnDataProfileOrBuilder getColumnDataProfilesOrBuilder( int index) { if (columnDataProfilesBuilder_ == null) { return columnDataProfiles_.get(index); } else { return columnDataProfilesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * List of data profiles. * </pre> * * <code>repeated .google.privacy.dlp.v2.ColumnDataProfile column_data_profiles = 1;</code> */ public java.util.List<? extends com.google.privacy.dlp.v2.ColumnDataProfileOrBuilder> getColumnDataProfilesOrBuilderList() { if (columnDataProfilesBuilder_ != null) { return columnDataProfilesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(columnDataProfiles_); } } /** * * * <pre> * List of data profiles. * </pre> * * <code>repeated .google.privacy.dlp.v2.ColumnDataProfile column_data_profiles = 1;</code> */ public com.google.privacy.dlp.v2.ColumnDataProfile.Builder addColumnDataProfilesBuilder() { return getColumnDataProfilesFieldBuilder() .addBuilder(com.google.privacy.dlp.v2.ColumnDataProfile.getDefaultInstance()); } /** * * * <pre> * List of data profiles. 
* </pre> * * <code>repeated .google.privacy.dlp.v2.ColumnDataProfile column_data_profiles = 1;</code> */ public com.google.privacy.dlp.v2.ColumnDataProfile.Builder addColumnDataProfilesBuilder( int index) { return getColumnDataProfilesFieldBuilder() .addBuilder(index, com.google.privacy.dlp.v2.ColumnDataProfile.getDefaultInstance()); } /** * * * <pre> * List of data profiles. * </pre> * * <code>repeated .google.privacy.dlp.v2.ColumnDataProfile column_data_profiles = 1;</code> */ public java.util.List<com.google.privacy.dlp.v2.ColumnDataProfile.Builder> getColumnDataProfilesBuilderList() { return getColumnDataProfilesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.privacy.dlp.v2.ColumnDataProfile, com.google.privacy.dlp.v2.ColumnDataProfile.Builder, com.google.privacy.dlp.v2.ColumnDataProfileOrBuilder> getColumnDataProfilesFieldBuilder() { if (columnDataProfilesBuilder_ == null) { columnDataProfilesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.privacy.dlp.v2.ColumnDataProfile, com.google.privacy.dlp.v2.ColumnDataProfile.Builder, com.google.privacy.dlp.v2.ColumnDataProfileOrBuilder>( columnDataProfiles_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); columnDataProfiles_ = null; } return columnDataProfilesBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * The next page token. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The next page token. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The next page token. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The next page token. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * The next page token. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.privacy.dlp.v2.ListColumnDataProfilesResponse) } // @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.ListColumnDataProfilesResponse) private static final com.google.privacy.dlp.v2.ListColumnDataProfilesResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.privacy.dlp.v2.ListColumnDataProfilesResponse(); } public static com.google.privacy.dlp.v2.ListColumnDataProfilesResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListColumnDataProfilesResponse> PARSER = new com.google.protobuf.AbstractParser<ListColumnDataProfilesResponse>() { @java.lang.Override public ListColumnDataProfilesResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return 
builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListColumnDataProfilesResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListColumnDataProfilesResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.privacy.dlp.v2.ListColumnDataProfilesResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/kafka
37,383
streams/integration-tests/src/test/java/org/apache/kafka/streams/integration/VersionedKeyValueStoreIntegrationTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.streams.integration; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.common.config.TopicConfig; import org.apache.kafka.common.serialization.IntegerDeserializer; import org.apache.kafka.common.serialization.IntegerSerializer; import org.apache.kafka.common.serialization.Serdes; import org.apache.kafka.common.serialization.StringSerializer; import org.apache.kafka.common.utils.Bytes; import org.apache.kafka.streams.KafkaStreams; import org.apache.kafka.streams.KeyValue; import org.apache.kafka.streams.StreamsBuilder; import org.apache.kafka.streams.StreamsConfig; import org.apache.kafka.streams.Topology; import org.apache.kafka.streams.integration.utils.EmbeddedKafkaCluster; import org.apache.kafka.streams.integration.utils.IntegrationTestUtils; import org.apache.kafka.streams.kstream.Consumed; import org.apache.kafka.streams.kstream.Produced; import org.apache.kafka.streams.processor.StateStore; import org.apache.kafka.streams.processor.StateStoreContext; import org.apache.kafka.streams.processor.api.Processor; import org.apache.kafka.streams.processor.api.ProcessorContext; import org.apache.kafka.streams.processor.api.Record; import 
org.apache.kafka.streams.query.KeyQuery; import org.apache.kafka.streams.query.PositionBound; import org.apache.kafka.streams.query.Query; import org.apache.kafka.streams.query.QueryConfig; import org.apache.kafka.streams.query.QueryResult; import org.apache.kafka.streams.query.RangeQuery; import org.apache.kafka.streams.query.StateQueryRequest; import org.apache.kafka.streams.query.StateQueryResult; import org.apache.kafka.streams.state.KeyValueStore; import org.apache.kafka.streams.state.Stores; import org.apache.kafka.streams.state.TimestampedKeyValueStore; import org.apache.kafka.streams.state.ValueAndTimestamp; import org.apache.kafka.streams.state.VersionedBytesStoreSupplier; import org.apache.kafka.streams.state.VersionedKeyValueStore; import org.apache.kafka.streams.state.VersionedRecord; import org.apache.kafka.streams.state.internals.VersionedKeyValueToBytesStoreAdapter; import org.apache.kafka.test.TestUtils; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.TestInfo; import java.io.IOException; import java.time.Duration; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Properties; import static org.apache.kafka.streams.utils.TestUtils.safeUniqueTestName; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; @Tag("integration") public class VersionedKeyValueStoreIntegrationTest { private static final String STORE_NAME = "versioned-store"; private static final long HISTORY_RETENTION = 3600_000L; private String inputStream; private String outputStream; private long baseTimestamp; private KafkaStreams kafkaStreams; private static final EmbeddedKafkaCluster 
CLUSTER = new EmbeddedKafkaCluster(1); public TestInfo testInfo; @BeforeAll public static void before() throws IOException { CLUSTER.start(); } @AfterAll public static void after() { CLUSTER.stop(); } @BeforeEach public void beforeTest(final TestInfo testInfo) throws InterruptedException { this.testInfo = testInfo; final String uniqueTestName = safeUniqueTestName(testInfo); inputStream = "input-stream-" + uniqueTestName; outputStream = "output-stream-" + uniqueTestName; CLUSTER.createTopic(inputStream); CLUSTER.createTopic(outputStream); baseTimestamp = CLUSTER.time.milliseconds(); } @AfterEach public void afterTest() { if (kafkaStreams != null) { kafkaStreams.close(Duration.ofSeconds(30L)); kafkaStreams.cleanUp(); } } @Test public void shouldPutGetAndDelete() throws Exception { // build topology and start app final StreamsBuilder streamsBuilder = new StreamsBuilder(); streamsBuilder .addStateStore( Stores.versionedKeyValueStoreBuilder( Stores.persistentVersionedKeyValueStore(STORE_NAME, Duration.ofMillis(HISTORY_RETENTION)), Serdes.Integer(), Serdes.String() ) ) .stream(inputStream, Consumed.with(Serdes.Integer(), Serdes.String())) .process(() -> new VersionedStoreContentCheckerProcessor(true), STORE_NAME) .to(outputStream, Produced.with(Serdes.Integer(), Serdes.Integer())); final Properties props = props(); kafkaStreams = new KafkaStreams(streamsBuilder.build(), props); kafkaStreams.start(); // produce source data int numRecordsProduced = 0; numRecordsProduced += produceDataToTopic(inputStream, baseTimestamp, KeyValue.pair(1, "a0"), KeyValue.pair(2, "b0"), KeyValue.pair(3, null)); numRecordsProduced += produceDataToTopic(inputStream, baseTimestamp + 5, KeyValue.pair(1, "a5"), KeyValue.pair(2, null), KeyValue.pair(3, "c5")); numRecordsProduced += produceDataToTopic(inputStream, baseTimestamp + 2, KeyValue.pair(1, "a2"), KeyValue.pair(2, "b2"), KeyValue.pair(3, null)); // out-of-order data numRecordsProduced += produceDataToTopic(inputStream, baseTimestamp + 5, 
KeyValue.pair(1, "a5_new"), KeyValue.pair(2, "b5"), KeyValue.pair(3, null)); // replace existing records numRecordsProduced += produceDataToTopic(inputStream, baseTimestamp + 7, KeyValue.pair(1, DataTracker.DELETE_VALUE_KEYWORD), KeyValue.pair(2, DataTracker.DELETE_VALUE_KEYWORD), KeyValue.pair(3, DataTracker.DELETE_VALUE_KEYWORD)); // delete // wait for output and verify final List<KeyValue<Integer, Integer>> receivedRecords = IntegrationTestUtils.waitUntilMinKeyValueRecordsReceived( TestUtils.consumerConfig( CLUSTER.bootstrapServers(), IntegerDeserializer.class, IntegerDeserializer.class), outputStream, numRecordsProduced); for (final KeyValue<Integer, Integer> receivedRecord : receivedRecords) { // verify zero failed checks for each record assertThat(receivedRecord.value, equalTo(0)); } } @Test public void shouldSetChangelogTopicProperties() throws Exception { // build topology and start app final StreamsBuilder streamsBuilder = new StreamsBuilder(); streamsBuilder .addStateStore( Stores.versionedKeyValueStoreBuilder( Stores.persistentVersionedKeyValueStore(STORE_NAME, Duration.ofMillis(HISTORY_RETENTION)), Serdes.Integer(), Serdes.String() ) ) .stream(inputStream, Consumed.with(Serdes.Integer(), Serdes.String())) .process(() -> new VersionedStoreContentCheckerProcessor(false), STORE_NAME) .to(outputStream, Produced.with(Serdes.Integer(), Serdes.Integer())); final Properties props = props(); kafkaStreams = new KafkaStreams(streamsBuilder.build(), props); kafkaStreams.start(); // produce record (and wait for result) to create changelog produceDataToTopic(inputStream, baseTimestamp, KeyValue.pair(0, "foo")); IntegrationTestUtils.waitUntilMinRecordsReceived( TestUtils.consumerConfig( CLUSTER.bootstrapServers(), IntegerDeserializer.class, IntegerDeserializer.class), outputStream, 1); // verify changelog topic properties final String changelogTopic = props.getProperty(StreamsConfig.APPLICATION_ID_CONFIG) + "-versioned-store-changelog"; final Properties 
changelogTopicConfig = CLUSTER.getLogConfig(changelogTopic); assertThat(changelogTopicConfig.getProperty("cleanup.policy"), equalTo("compact")); assertThat(changelogTopicConfig.getProperty("min.compaction.lag.ms"), equalTo(Long.toString(HISTORY_RETENTION + 24 * 60 * 60 * 1000L))); } @Test public void shouldRestore() throws Exception { // build topology and start app StreamsBuilder streamsBuilder = new StreamsBuilder(); streamsBuilder .addStateStore( Stores.versionedKeyValueStoreBuilder( Stores.persistentVersionedKeyValueStore(STORE_NAME, Duration.ofMillis(HISTORY_RETENTION)), Serdes.Integer(), Serdes.String() ) ) .stream(inputStream, Consumed.with(Serdes.Integer(), Serdes.String())) .process(() -> new VersionedStoreContentCheckerProcessor(true), STORE_NAME) .to(outputStream, Produced.with(Serdes.Integer(), Serdes.Integer())); final Properties props = props(); kafkaStreams = new KafkaStreams(streamsBuilder.build(), props); kafkaStreams.start(); // produce source data and track in-memory to verify after restore final DataTracker data = new DataTracker(); int initialRecordsProduced = 0; initialRecordsProduced += produceDataToTopic(inputStream, data, baseTimestamp, KeyValue.pair(1, "a0"), KeyValue.pair(2, "b0"), KeyValue.pair(3, null)); initialRecordsProduced += produceDataToTopic(inputStream, data, baseTimestamp + 5, KeyValue.pair(1, "a5"), KeyValue.pair(2, null), KeyValue.pair(3, "c5")); initialRecordsProduced += produceDataToTopic(inputStream, data, baseTimestamp + 2, KeyValue.pair(1, "a2"), KeyValue.pair(2, "b2"), KeyValue.pair(3, null)); // out-of-order data initialRecordsProduced += produceDataToTopic(inputStream, data, baseTimestamp + 5, KeyValue.pair(1, "a5_new"), KeyValue.pair(2, "b5"), KeyValue.pair(3, null)); // replace existing records initialRecordsProduced += produceDataToTopic(inputStream, data, baseTimestamp + 7, KeyValue.pair(1, DataTracker.DELETE_VALUE_KEYWORD), KeyValue.pair(2, DataTracker.DELETE_VALUE_KEYWORD), KeyValue.pair(3, 
DataTracker.DELETE_VALUE_KEYWORD)); // delete initialRecordsProduced += produceDataToTopic(inputStream, data, baseTimestamp + 10, KeyValue.pair(1, "a10"), KeyValue.pair(2, "b10"), KeyValue.pair(3, "c10")); // new data so latest is not tombstone // wait for output IntegrationTestUtils.waitUntilMinRecordsReceived( TestUtils.consumerConfig( CLUSTER.bootstrapServers(), IntegerDeserializer.class, IntegerDeserializer.class), outputStream, initialRecordsProduced); // wipe out state store to trigger restore process on restart kafkaStreams.close(); kafkaStreams.cleanUp(); // restart app and pass expected store contents to processor streamsBuilder = new StreamsBuilder(); streamsBuilder .addStateStore( Stores.versionedKeyValueStoreBuilder( Stores.persistentVersionedKeyValueStore(STORE_NAME, Duration.ofMillis(HISTORY_RETENTION)), Serdes.Integer(), Serdes.String() ) ) .stream(inputStream, Consumed.with(Serdes.Integer(), Serdes.String())) .process(() -> new VersionedStoreContentCheckerProcessor(true, data), STORE_NAME) .to(outputStream, Produced.with(Serdes.Integer(), Serdes.Integer())); kafkaStreams = new KafkaStreams(streamsBuilder.build(), props); kafkaStreams.start(); // produce additional records final int additionalRecordsProduced = produceDataToTopic(inputStream, baseTimestamp + 12, KeyValue.pair(1, "a12"), KeyValue.pair(2, "b12"), KeyValue.pair(3, "c12")); // wait for output and verify final List<KeyValue<Integer, Integer>> receivedRecords = IntegrationTestUtils.waitUntilMinKeyValueRecordsReceived( TestUtils.consumerConfig( CLUSTER.bootstrapServers(), IntegerDeserializer.class, IntegerDeserializer.class), outputStream, initialRecordsProduced + additionalRecordsProduced); for (final KeyValue<Integer, Integer> receivedRecord : receivedRecords) { // verify zero failed checks for each record assertThat(receivedRecord.value, equalTo(0)); } } @Test public void shouldAllowCustomIQv2ForCustomStoreImplementations() { // build topology and start app final StreamsBuilder 
streamsBuilder = new StreamsBuilder(); streamsBuilder .addStateStore( Stores.versionedKeyValueStoreBuilder( new CustomIQv2VersionedStoreSupplier(), Serdes.Integer(), Serdes.String() ) ) .stream(inputStream, Consumed.with(Serdes.Integer(), Serdes.String())) .process(() -> new VersionedStoreContentCheckerProcessor(false), STORE_NAME); final Properties props = props(); kafkaStreams = new KafkaStreams(streamsBuilder.build(), props); kafkaStreams.start(); // issue IQv2 query and verify result final StateQueryRequest<String> request = StateQueryRequest.inStore(STORE_NAME) .withQuery(new TestQuery()) .withPartitions(Collections.singleton(0)); final StateQueryResult<String> result = IntegrationTestUtils.iqv2WaitForResult(kafkaStreams, request); assertThat(result.getOnlyPartitionResult().getResult(), equalTo("success")); } @Test public void shouldManualUpgradeFromNonVersionedTimestampedToVersioned() throws Exception { // build non-versioned (timestamped) topology final StreamsBuilder streamsBuilder = new StreamsBuilder(); streamsBuilder .addStateStore( Stores.timestampedKeyValueStoreBuilder( Stores.persistentTimestampedKeyValueStore(STORE_NAME), Serdes.Integer(), Serdes.String() ) ) .stream(inputStream, Consumed.with(Serdes.Integer(), Serdes.String())) .process(TimestampedStoreContentCheckerProcessor::new, STORE_NAME) .to(outputStream, Produced.with(Serdes.Integer(), Serdes.Integer())); shouldManualUpgradeFromNonVersionedToVersioned(streamsBuilder.build()); } @Test public void shouldManualUpgradeFromNonVersionedNonTimestampedToVersioned() throws Exception { // build non-versioned (non-timestamped) topology final StreamsBuilder streamsBuilder = new StreamsBuilder(); streamsBuilder .addStateStore( Stores.keyValueStoreBuilder( Stores.persistentKeyValueStore(STORE_NAME), Serdes.Integer(), Serdes.String() ) ) .stream(inputStream, Consumed.with(Serdes.Integer(), Serdes.String())) .process(KeyValueStoreContentCheckerProcessor::new, STORE_NAME) .to(outputStream, 
Produced.with(Serdes.Integer(), Serdes.Integer())); shouldManualUpgradeFromNonVersionedToVersioned(streamsBuilder.build()); } private void shouldManualUpgradeFromNonVersionedToVersioned(final Topology originalTopology) throws Exception { // build original (non-versioned) topology and start app final Properties props = props(); // additional property to prevent premature compaction of older record versions while using timestamped store props.put(TopicConfig.MIN_COMPACTION_LAG_MS_CONFIG, 60_000L); kafkaStreams = new KafkaStreams(originalTopology, props); kafkaStreams.start(); // produce source data and track in-memory to verify after restore final DataTracker data = new DataTracker(); int initialRecordsProduced = 0; initialRecordsProduced += produceDataToTopic(inputStream, data, baseTimestamp, KeyValue.pair(1, "a0"), KeyValue.pair(2, "b0"), KeyValue.pair(3, null)); initialRecordsProduced += produceDataToTopic(inputStream, data, baseTimestamp + 5, KeyValue.pair(1, "a5"), KeyValue.pair(2, null), KeyValue.pair(3, "c5")); initialRecordsProduced += produceDataToTopic(inputStream, data, baseTimestamp + 2, KeyValue.pair(1, "a2"), KeyValue.pair(2, "b2"), KeyValue.pair(3, null)); // out-of-order data // wait for output and verify List<KeyValue<Integer, Integer>> receivedRecords = IntegrationTestUtils.waitUntilMinKeyValueRecordsReceived( TestUtils.consumerConfig( CLUSTER.bootstrapServers(), IntegerDeserializer.class, IntegerDeserializer.class), outputStream, initialRecordsProduced); for (final KeyValue<Integer, Integer> receivedRecord : receivedRecords) { // verify zero failed checks for each record assertThat(receivedRecord.value, equalTo(0)); } // wipe out state store to trigger restore process on restart kafkaStreams.close(); kafkaStreams.cleanUp(); // restart app with versioned store, and pass expected store contents to processor final StreamsBuilder streamsBuilder = new StreamsBuilder(); streamsBuilder .addStateStore( Stores.versionedKeyValueStoreBuilder( 
Stores.persistentVersionedKeyValueStore(STORE_NAME, Duration.ofMillis(HISTORY_RETENTION)), Serdes.Integer(), Serdes.String() ) ) .stream(inputStream, Consumed.with(Serdes.Integer(), Serdes.String())) .process(() -> new VersionedStoreContentCheckerProcessor(true, data), STORE_NAME) .to(outputStream, Produced.with(Serdes.Integer(), Serdes.Integer())); kafkaStreams = new KafkaStreams(streamsBuilder.build(), props); kafkaStreams.start(); // produce additional records final int additionalRecordsProduced = produceDataToTopic(inputStream, baseTimestamp + 12, KeyValue.pair(1, "a12"), KeyValue.pair(2, "b12"), KeyValue.pair(3, "c12")); // wait for output and verify receivedRecords = IntegrationTestUtils.waitUntilMinKeyValueRecordsReceived( TestUtils.consumerConfig( CLUSTER.bootstrapServers(), IntegerDeserializer.class, IntegerDeserializer.class), outputStream, initialRecordsProduced + additionalRecordsProduced); for (final KeyValue<Integer, Integer> receivedRecord : receivedRecords) { // verify zero failed checks for each record assertThat(receivedRecord.value, equalTo(0)); } } private Properties props() { final String safeTestName = safeUniqueTestName(testInfo); final Properties streamsConfiguration = new Properties(); streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, "app-" + safeTestName); streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers()); streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getPath()); streamsConfiguration.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 1000L); streamsConfiguration.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); return streamsConfiguration; } /** * @return number of records produced */ @SuppressWarnings("varargs") @SafeVarargs private final int produceDataToTopic(final String topic, final long timestamp, final KeyValue<Integer, String>... 
keyValues) { IntegrationTestUtils.produceKeyValuesSynchronouslyWithTimestamp( topic, Arrays.asList(keyValues), TestUtils.producerConfig(CLUSTER.bootstrapServers(), IntegerSerializer.class, StringSerializer.class), timestamp); return keyValues.length; } /** * @param topic topic to produce to * @param dataTracker map of key -> timestamp -> value for tracking data which is produced to * the topic. This method will add the produced data into this in-memory * tracker in addition to producing to the topic, in order to keep the two * in sync. * @param timestamp timestamp to produce with * @param keyValues key-value pairs to produce * * @return number of records produced */ @SuppressWarnings("varargs") @SafeVarargs private final int produceDataToTopic(final String topic, final DataTracker dataTracker, final long timestamp, final KeyValue<Integer, String>... keyValues) { produceDataToTopic(topic, timestamp, keyValues); for (final KeyValue<Integer, String> keyValue : keyValues) { dataTracker.add(keyValue.key, timestamp, keyValue.value); } return keyValues.length; } /** * Test-only processor for validating expected contents of a versioned store, and forwards * the number of failed checks downstream for consumption. Callers specify whether the * processor should also be responsible for inserting records into the store (while also * tracking them separately in-memory for use in validation). */ private static class VersionedStoreContentCheckerProcessor implements Processor<Integer, String, Integer, Integer> { private ProcessorContext<Integer, Integer> context; private VersionedKeyValueStore<Integer, String> store; // whether or not the processor should write records to the store as they arrive. // must be false for global stores. private final boolean writeToStore; // in-memory copy of seen data, to validate for testing purposes. 
private final DataTracker data; /** * @param writeToStore whether or not this processor should write to the store */ VersionedStoreContentCheckerProcessor(final boolean writeToStore) { this(writeToStore, new DataTracker()); } /** * @param writeToStore whether or not this processor should write to the store * @param initialData expected store contents which have already been inserted from * outside of this processor */ VersionedStoreContentCheckerProcessor(final boolean writeToStore, final DataTracker initialData) { this.writeToStore = writeToStore; this.data = initialData; } @Override public void init(final ProcessorContext<Integer, Integer> context) { this.context = context; store = context.getStateStore(STORE_NAME); } @Override public void process(final Record<Integer, String> record) { if (writeToStore) { // add record to store. special value "delete" is interpreted as a delete() call, // in contrast to null value, which is a tombstone inserted via put() if (DataTracker.DELETE_VALUE_KEYWORD.equals(record.value())) { store.delete(record.key(), record.timestamp()); } else { store.put(record.key(), record.value(), record.timestamp()); } data.add(record.key(), record.timestamp(), record.value()); } // check expected contents of store, and signal completion by writing // number of failures to downstream final int failedChecks = checkStoreContents(); context.forward(record.withValue(failedChecks)); } /** * @return number of failed checks */ private int checkStoreContents() { int failedChecks = 0; for (final Map.Entry<Integer, Map<Long, Optional<String>>> keyWithTimestampsAndValues : data.data.entrySet()) { final Integer key = keyWithTimestampsAndValues.getKey(); final Map<Long, Optional<String>> timestampsAndValues = keyWithTimestampsAndValues.getValue(); // track largest timestamp seen for key long maxExpectedTimestamp = -1L; String expectedValueForMaxTimestamp = null; for (final Map.Entry<Long, Optional<String>> timestampAndValue : timestampsAndValues.entrySet()) { 
final Long expectedTimestamp = timestampAndValue.getKey(); final String expectedValue = timestampAndValue.getValue().orElse(null); if (expectedTimestamp > maxExpectedTimestamp) { maxExpectedTimestamp = expectedTimestamp; expectedValueForMaxTimestamp = expectedValue; } // validate timestamped get on store final VersionedRecord<String> versionedRecord = store.get(key, expectedTimestamp); if (!contentsMatch(versionedRecord, expectedValue, expectedTimestamp)) { failedChecks++; } } // validate get latest on store final VersionedRecord<String> versionedRecord = store.get(key); if (!contentsMatch(versionedRecord, expectedValueForMaxTimestamp, maxExpectedTimestamp)) { failedChecks++; } } return failedChecks; } private static boolean contentsMatch(final VersionedRecord<String> versionedRecord, final String expectedValue, final long expectedTimestamp) { if (expectedValue == null) { return versionedRecord == null; } else { if (versionedRecord == null) { return false; } return expectedValue.equals(versionedRecord.value()) && expectedTimestamp == versionedRecord.timestamp(); } } } /** * Same as {@link VersionedStoreContentCheckerProcessor} but for timestamped stores instead, * for use in validating the manual upgrade path from non-versioned to versioned stores. */ private static class TimestampedStoreContentCheckerProcessor implements Processor<Integer, String, Integer, Integer> { private ProcessorContext<Integer, Integer> context; private TimestampedKeyValueStore<Integer, String> store; // in-memory copy of seen data, to validate for testing purposes. 
private final Map<Integer, Optional<ValueAndTimestamp<String>>> data; TimestampedStoreContentCheckerProcessor() { this.data = new HashMap<>(); } @Override public void init(final ProcessorContext<Integer, Integer> context) { this.context = context; store = context.getStateStore(STORE_NAME); } @Override public void process(final Record<Integer, String> record) { // add record to store if (DataTracker.DELETE_VALUE_KEYWORD.equals(record.value())) { // special value "delete" is interpreted as a delete() call from // VersionedStoreContentCheckerProcessor but we do not support it here throw new IllegalArgumentException("Using 'delete' keyword for " + "TimestampedStoreContentCheckerProcessor will result in the record " + "timestamp being ignored. Use regular put with null value instead."); } final ValueAndTimestamp<String> valueAndTimestamp = ValueAndTimestamp.make(record.value(), record.timestamp()); store.put(record.key(), valueAndTimestamp); data.put(record.key(), Optional.ofNullable(valueAndTimestamp)); // check expected contents of store, and signal completion by writing // number of failures to downstream final int failedChecks = checkStoreContents(); context.forward(record.withValue(failedChecks)); } /** * @return number of failed checks */ private int checkStoreContents() { int failedChecks = 0; for (final Map.Entry<Integer, Optional<ValueAndTimestamp<String>>> keyWithValueAndTimestamp : data.entrySet()) { final Integer key = keyWithValueAndTimestamp.getKey(); final ValueAndTimestamp<String> valueAndTimestamp = keyWithValueAndTimestamp.getValue().orElse(null); // validate get from store final ValueAndTimestamp<String> record = store.get(key); if (!Objects.equals(record, valueAndTimestamp)) { failedChecks++; } } return failedChecks; } } /** * Same as {@link VersionedStoreContentCheckerProcessor} but for regular key-value stores instead, * for use in validating the manual upgrade path from non-versioned to versioned stores. 
*/ private static class KeyValueStoreContentCheckerProcessor implements Processor<Integer, String, Integer, Integer> { private ProcessorContext<Integer, Integer> context; private KeyValueStore<Integer, String> store; // in-memory copy of seen data, to validate for testing purposes. private final Map<Integer, Optional<String>> data; KeyValueStoreContentCheckerProcessor() { this.data = new HashMap<>(); } @Override public void init(final ProcessorContext<Integer, Integer> context) { this.context = context; store = context.getStateStore(STORE_NAME); } @Override public void process(final Record<Integer, String> record) { // add record to store if (DataTracker.DELETE_VALUE_KEYWORD.equals(record.value())) { // special value "delete" is interpreted as a delete() call from // VersionedStoreContentCheckerProcessor but we do not support it here throw new IllegalArgumentException("Using 'delete' keyword for " + "KeyValueStoreContentCheckerProcessor will result in the record " + "timestamp being ignored. Use regular put with null value instead."); } store.put(record.key(), record.value()); data.put(record.key(), Optional.ofNullable(record.value())); // check expected contents of store, and signal completion by writing // number of failures to downstream final int failedChecks = checkStoreContents(); context.forward(record.withValue(failedChecks)); } /** * @return number of failed checks */ private int checkStoreContents() { int failedChecks = 0; for (final Map.Entry<Integer, Optional<String>> keyValue : data.entrySet()) { final Integer expectedKey = keyValue.getKey(); final String expectedValue = keyValue.getValue().orElse(null); // validate get from store final String foundValue = store.get(expectedKey); if (!Objects.equals(foundValue, expectedValue)) { failedChecks++; } } return failedChecks; } } /** * In-memory copy of data put to versioned store, for verification purposes. 
*/ private static class DataTracker { // special value which is interpreted as call to store.delete() static final String DELETE_VALUE_KEYWORD = "delete"; // maps from key -> timestamp -> value. // value is represented as Optional to ensure proper recording of nulls. final Map<Integer, Map<Long, Optional<String>>> data = new HashMap<>(); void add(final Integer key, final long timestamp, final String value) { data.computeIfAbsent(key, k -> new HashMap<>()); if (DELETE_VALUE_KEYWORD.equals(value)) { data.get(key).put(timestamp, Optional.empty()); } else { data.get(key).put(timestamp, Optional.ofNullable(value)); } } } /** * Custom {@link Query} type for test purposes. Versioned stores do not currently support the * built-in {@link KeyQuery} or {@link RangeQuery} query types, but custom types are allowed. */ private static class TestQuery implements Query<String> { } /** * Supplies a custom {@link VersionedKeyValueStore} implementation solely for the purpose * of testing IQv2 queries. A hard-coded "success" result is returned in response to * {@code TestQuery} queries. */ private static class CustomIQv2VersionedStoreSupplier implements VersionedBytesStoreSupplier { @Override public String name() { return STORE_NAME; } @Override public KeyValueStore<Bytes, byte[]> get() { return new VersionedKeyValueToBytesStoreAdapter(new CustomIQv2VersionedStore()); } @Override public String metricsScope() { return "metrics-scope"; } @Override public long historyRetentionMs() { return HISTORY_RETENTION; } /** * Custom {@link VersionedKeyValueStore} implementation solely for the purpose of testing * IQv2 queries. All other methods are unsupported / no-ops. 
*/ private static class CustomIQv2VersionedStore implements VersionedKeyValueStore<Bytes, byte[]> { @SuppressWarnings("unchecked") @Override public <R> QueryResult<R> query(final Query<R> query, final PositionBound positionBound, final QueryConfig config) { if (query instanceof TestQuery) { return (QueryResult<R>) QueryResult.forResult("success"); } else { throw new UnsupportedOperationException(); } } @Override public long put(final Bytes key, final byte[] value, final long timestamp) { throw new UnsupportedOperationException(); } @Override public VersionedRecord<byte[]> delete(final Bytes key, final long timestamp) { throw new UnsupportedOperationException(); } @Override public VersionedRecord<byte[]> get(final Bytes key) { throw new UnsupportedOperationException(); } @Override public VersionedRecord<byte[]> get(final Bytes key, final long asOfTimestamp) { throw new UnsupportedOperationException(); } @Override public String name() { return STORE_NAME; } @Override public void init(final StateStoreContext stateStoreContext, final StateStore root) { stateStoreContext.register( root, (key, value) -> { } ); } @Override public void flush() { // do nothing } @Override public void close() { // do nothing } @Override public boolean persistent() { return false; } @Override public boolean isOpen() { return true; } } } }
apache/lucene
37,390
lucene/core/src/java/org/apache/lucene/codecs/lucene102/Lucene102BinaryQuantizedVectorsWriter.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.lucene.codecs.lucene102;

import static org.apache.lucene.codecs.lucene102.Lucene102BinaryQuantizedVectorsFormat.BINARIZED_VECTOR_COMPONENT;
import static org.apache.lucene.codecs.lucene102.Lucene102BinaryQuantizedVectorsFormat.DIRECT_MONOTONIC_BLOCK_SHIFT;
import static org.apache.lucene.codecs.lucene102.Lucene102BinaryQuantizedVectorsFormat.INDEX_BITS;
import static org.apache.lucene.codecs.lucene102.Lucene102BinaryQuantizedVectorsFormat.QUERY_BITS;
import static org.apache.lucene.index.VectorSimilarityFunction.COSINE;
import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS;
import static org.apache.lucene.util.RamUsageEstimator.shallowSizeOfInstance;
import static org.apache.lucene.util.quantization.OptimizedScalarQuantizer.discretize;
import static org.apache.lucene.util.quantization.OptimizedScalarQuantizer.packAsBinary;
import static org.apache.lucene.util.quantization.OptimizedScalarQuantizer.transposeHalfByte;

import java.io.Closeable;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.KnnVectorsReader;
import org.apache.lucene.codecs.hnsw.FlatFieldVectorsWriter;
import org.apache.lucene.codecs.hnsw.FlatVectorsWriter;
import org.apache.lucene.codecs.lucene95.OrdToDocDISIReaderConfiguration;
import org.apache.lucene.index.DocsWithFieldSet;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FloatVectorValues;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.KnnVectorValues;
import org.apache.lucene.index.MergeState;
import org.apache.lucene.index.SegmentWriteState;
import org.apache.lucene.index.Sorter;
import org.apache.lucene.index.VectorEncoding;
import org.apache.lucene.index.VectorSimilarityFunction;
import org.apache.lucene.internal.hppc.FloatArrayList;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.VectorScorer;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.VectorUtil;
import org.apache.lucene.util.hnsw.CloseableRandomVectorScorerSupplier;
import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier;
import org.apache.lucene.util.hnsw.UpdateableRandomVectorScorer;
import org.apache.lucene.util.quantization.OptimizedScalarQuantizer;

/**
 * Copied from Lucene, replace with Lucene's implementation sometime after Lucene 10.
 *
 * <p>Writes float vectors as 1-bit ("binarized") scalar-quantized vectors. For each vector the
 * data file stores the packed bit vector followed by per-vector corrective terms (three floats and
 * an unsigned-short quantized component sum) produced by {@link OptimizedScalarQuantizer}. Raw
 * float vectors are delegated to {@code rawVectorDelegate}; this writer only adds the quantized
 * representation plus a per-field metadata entry.
 */
public class Lucene102BinaryQuantizedVectorsWriter extends FlatVectorsWriter {
  private static final long SHALLOW_RAM_BYTES_USED =
      shallowSizeOfInstance(Lucene102BinaryQuantizedVectorsWriter.class);

  private final SegmentWriteState segmentWriteState;
  // One FieldWriter per FLOAT32 vector field seen via addField.
  private final List<FieldWriter> fields = new ArrayList<>();
  // meta: per-field metadata; binarizedVectorData: packed bits + corrective terms.
  private final IndexOutput meta, binarizedVectorData;
  private final FlatVectorsWriter rawVectorDelegate;
  private final Lucene102BinaryFlatVectorsScorer vectorsScorer;
  private boolean finished;

  /**
   * Sole constructor. Creates the meta and vector-data outputs and writes their index headers;
   * on any failure the partially-opened outputs are closed (via {@code close()}) before the
   * throwable is rethrown.
   *
   * @param vectorsScorer the scorer to use for scoring vectors
   * @param rawVectorDelegate writer that persists the raw (unquantized) float vectors
   * @param state segment write state supplying directory, names, and IO context
   */
  protected Lucene102BinaryQuantizedVectorsWriter(
      Lucene102BinaryFlatVectorsScorer vectorsScorer,
      FlatVectorsWriter rawVectorDelegate,
      SegmentWriteState state)
      throws IOException {
    super(vectorsScorer);
    this.vectorsScorer = vectorsScorer;
    this.segmentWriteState = state;
    String metaFileName =
        IndexFileNames.segmentFileName(
            state.segmentInfo.name,
            state.segmentSuffix,
            Lucene102BinaryQuantizedVectorsFormat.META_EXTENSION);
    String binarizedVectorDataFileName =
        IndexFileNames.segmentFileName(
            state.segmentInfo.name,
            state.segmentSuffix,
            Lucene102BinaryQuantizedVectorsFormat.VECTOR_DATA_EXTENSION);
    this.rawVectorDelegate = rawVectorDelegate;
    try {
      meta = state.directory.createOutput(metaFileName, state.context);
      binarizedVectorData =
          state.directory.createOutput(binarizedVectorDataFileName, state.context);
      CodecUtil.writeIndexHeader(
          meta,
          Lucene102BinaryQuantizedVectorsFormat.META_CODEC_NAME,
          Lucene102BinaryQuantizedVectorsFormat.VERSION_CURRENT,
          state.segmentInfo.getId(),
          state.segmentSuffix);
      CodecUtil.writeIndexHeader(
          binarizedVectorData,
          Lucene102BinaryQuantizedVectorsFormat.VECTOR_DATA_CODEC_NAME,
          Lucene102BinaryQuantizedVectorsFormat.VERSION_CURRENT,
          state.segmentInfo.getId(),
          state.segmentSuffix);
    } catch (Throwable t) {
      // Close whatever was opened so far without masking the original failure.
      IOUtils.closeWhileSuppressingExceptions(t, this);
      throw t;
    }
  }

  /**
   * Registers a field with the delegate; FLOAT32 fields are additionally wrapped in a
   * {@link FieldWriter} so this writer can quantize them at flush time. Other encodings are
   * handled entirely by the delegate.
   */
  @Override
  public FlatFieldVectorsWriter<?> addField(FieldInfo fieldInfo) throws IOException {
    FlatFieldVectorsWriter<?> rawVectorDelegate = this.rawVectorDelegate.addField(fieldInfo);
    if (fieldInfo.getVectorEncoding().equals(VectorEncoding.FLOAT32)) {
      @SuppressWarnings("unchecked")
      FieldWriter fieldWriter =
          new FieldWriter(fieldInfo, (FlatFieldVectorsWriter<float[]>) rawVectorDelegate);
      fields.add(fieldWriter);
      return fieldWriter;
    }
    return rawVectorDelegate;
  }

  /**
   * Flushes raw vectors via the delegate, then, per field: computes the centroid (mean of the
   * buffered — cosine-normalized if applicable — vectors), quantizes each vector against it, and
   * writes the binarized data plus metadata, honoring {@code sortMap} when the segment is sorted.
   */
  @Override
  public void flush(int maxDoc, Sorter.DocMap sortMap) throws IOException {
    rawVectorDelegate.flush(maxDoc, sortMap);
    for (FieldWriter field : fields) {
      // after raw vectors are written, normalize vectors for clustering and quantization
      if (VectorSimilarityFunction.COSINE == field.fieldInfo.getVectorSimilarityFunction()) {
        field.normalizeVectors();
      }
      final float[] clusterCenter;
      int vectorCount = field.flatFieldVectorsWriter.getVectors().size();
      clusterCenter = new float[field.dimensionSums.length];
      if (vectorCount > 0) {
        // Centroid = component-wise mean of the accumulated dimension sums.
        for (int i = 0; i < field.dimensionSums.length; i++) {
          clusterCenter[i] = field.dimensionSums[i] / vectorCount;
        }
        if (VectorSimilarityFunction.COSINE == field.fieldInfo.getVectorSimilarityFunction()) {
          VectorUtil.l2normalize(clusterCenter);
        }
      }
      if (segmentWriteState.infoStream.isEnabled(BINARIZED_VECTOR_COMPONENT)) {
        segmentWriteState.infoStream.message(
            BINARIZED_VECTOR_COMPONENT, "Vectors' count:" + vectorCount);
      }
      OptimizedScalarQuantizer quantizer =
          new OptimizedScalarQuantizer(field.fieldInfo.getVectorSimilarityFunction());
      if (sortMap == null) {
        writeField(field, clusterCenter, maxDoc, quantizer);
      } else {
        writeSortingField(field, clusterCenter, maxDoc, sortMap, quantizer);
      }
      field.finish();
    }
  }

  /** Writes one field's binarized vectors (in insertion order) followed by its metadata entry. */
  private void writeField(
      FieldWriter fieldData, float[] clusterCenter, int maxDoc, OptimizedScalarQuantizer quantizer)
      throws IOException {
    // write vector values
    long vectorDataOffset = binarizedVectorData.alignFilePointer(Float.BYTES);
    writeBinarizedVectors(fieldData, clusterCenter, quantizer);
    long vectorDataLength = binarizedVectorData.getFilePointer() - vectorDataOffset;
    float centroidDp =
        fieldData.getVectors().size() > 0
            ? VectorUtil.dotProduct(clusterCenter, clusterCenter)
            : 0;
    writeMeta(
        fieldData.fieldInfo,
        maxDoc,
        vectorDataOffset,
        vectorDataLength,
        clusterCenter,
        centroidDp,
        fieldData.getDocsWithFieldSet());
  }

  /**
   * Per vector: 1-bit quantize against the centroid, pack to a bit vector, and write
   * {@code dims/8} bytes + 3 correction floats + an unsigned-short quantized component sum.
   */
  private void writeBinarizedVectors(
      FieldWriter fieldData, float[] clusterCenter, OptimizedScalarQuantizer scalarQuantizer)
      throws IOException {
    // Dimension count rounded up to a multiple of 64 so packed bits fill whole longs.
    int discreteDims = discretize(fieldData.fieldInfo.getVectorDimension(), 64);
    byte[] quantizationScratch = new byte[discreteDims];
    byte[] vector = new byte[discreteDims / 8];
    for (int i = 0; i < fieldData.getVectors().size(); i++) {
      float[] v = fieldData.getVectors().get(i);
      OptimizedScalarQuantizer.QuantizationResult corrections =
          scalarQuantizer.scalarQuantize(v, quantizationScratch, INDEX_BITS, clusterCenter);
      packAsBinary(quantizationScratch, vector);
      binarizedVectorData.writeBytes(vector, vector.length);
      binarizedVectorData.writeInt(Float.floatToIntBits(corrections.lowerInterval()));
      binarizedVectorData.writeInt(Float.floatToIntBits(corrections.upperInterval()));
      binarizedVectorData.writeInt(Float.floatToIntBits(corrections.additionalCorrection()));
      // The component sum must fit an unsigned short; written as such and read back unsigned.
      assert corrections.quantizedComponentSum() >= 0
          && corrections.quantizedComponentSum() <= 0xffff;
      binarizedVectorData.writeShort((short) corrections.quantizedComponentSum());
    }
  }

  /**
   * Sorted-segment variant of {@link #writeField}: builds the new-ord → old-ord map from
   * {@code sortMap} and writes vectors in the new document order.
   */
  private void writeSortingField(
      FieldWriter fieldData,
      float[] clusterCenter,
      int maxDoc,
      Sorter.DocMap sortMap,
      OptimizedScalarQuantizer scalarQuantizer)
      throws IOException {
    final int[] ordMap = new int[fieldData.getDocsWithFieldSet().cardinality()]; // new ord to old ord
    DocsWithFieldSet newDocsWithField = new DocsWithFieldSet();
    mapOldOrdToNewOrd(fieldData.getDocsWithFieldSet(), sortMap, null, ordMap, newDocsWithField);
    // write vector values
    long vectorDataOffset = binarizedVectorData.alignFilePointer(Float.BYTES);
    writeSortedBinarizedVectors(fieldData, clusterCenter, ordMap, scalarQuantizer);
    long quantizedVectorLength = binarizedVectorData.getFilePointer() - vectorDataOffset;
    float centroidDp = VectorUtil.dotProduct(clusterCenter, clusterCenter);
    writeMeta(
        fieldData.fieldInfo,
        maxDoc,
        vectorDataOffset,
        quantizedVectorLength,
        clusterCenter,
        centroidDp,
        newDocsWithField);
  }

  /** Same record layout as {@link #writeBinarizedVectors} but emitted in {@code ordMap} order. */
  private void writeSortedBinarizedVectors(
      FieldWriter fieldData, float[] clusterCenter, int[] ordMap, OptimizedScalarQuantizer scalarQuantizer)
      throws IOException {
    int discreteDims = discretize(fieldData.fieldInfo.getVectorDimension(), 64);
    byte[] quantizationScratch = new byte[discreteDims];
    byte[] vector = new byte[discreteDims / 8];
    for (int ordinal : ordMap) {
      float[] v = fieldData.getVectors().get(ordinal);
      OptimizedScalarQuantizer.QuantizationResult corrections =
          scalarQuantizer.scalarQuantize(v, quantizationScratch, INDEX_BITS, clusterCenter);
      packAsBinary(quantizationScratch, vector);
      binarizedVectorData.writeBytes(vector, vector.length);
      binarizedVectorData.writeInt(Float.floatToIntBits(corrections.lowerInterval()));
      binarizedVectorData.writeInt(Float.floatToIntBits(corrections.upperInterval()));
      binarizedVectorData.writeInt(Float.floatToIntBits(corrections.additionalCorrection()));
      assert corrections.quantizedComponentSum() >= 0
          && corrections.quantizedComponentSum() <= 0xffff;
      binarizedVectorData.writeShort((short) corrections.quantizedComponentSum());
    }
  }

  /**
   * Writes one field's metadata record: field number, encoding and similarity ordinals, dimension,
   * data offset/length, vector count, then (when count &gt; 0) the centroid as little-endian
   * floats plus its self dot product, and finally the ord-to-doc configuration.
   */
  private void writeMeta(
      FieldInfo field,
      int maxDoc,
      long vectorDataOffset,
      long vectorDataLength,
      float[] clusterCenter,
      float centroidDp,
      DocsWithFieldSet docsWithField)
      throws IOException {
    meta.writeInt(field.number);
    meta.writeInt(field.getVectorEncoding().ordinal());
    meta.writeInt(field.getVectorSimilarityFunction().ordinal());
    meta.writeVInt(field.getVectorDimension());
    meta.writeVLong(vectorDataOffset);
    meta.writeVLong(vectorDataLength);
    int count = docsWithField.cardinality();
    meta.writeVInt(count);
    if (count > 0) {
      // Centroid floats are serialized little-endian via a ByteBuffer view.
      final ByteBuffer buffer =
          ByteBuffer.allocate(field.getVectorDimension() * Float.BYTES)
              .order(ByteOrder.LITTLE_ENDIAN);
      buffer.asFloatBuffer().put(clusterCenter);
      meta.writeBytes(buffer.array(), buffer.array().length);
      meta.writeInt(Float.floatToIntBits(centroidDp));
    }
    OrdToDocDISIReaderConfiguration.writeStoredMeta(
        DIRECT_MONOTONIC_BLOCK_SHIFT, meta, binarizedVectorData, count, maxDoc, docsWithField);
  }

  /** Finishes the delegate, writes the end-of-fields marker (-1) and both file footers. */
  @Override
  public void finish() throws IOException {
    if (finished) {
      throw new IllegalStateException("already finished");
    }
    finished = true;
    rawVectorDelegate.finish();
    if (meta != null) {
      // write end of fields marker
      meta.writeInt(-1);
      CodecUtil.writeFooter(meta);
    }
    if (binarizedVectorData != null) {
      CodecUtil.writeFooter(binarizedVectorData);
    }
  }

  /**
   * Merge path without building a graph: recomputes (or averages) the centroid across segments,
   * streams the merged float vectors through {@link BinarizedFloatVectorValues}, and writes the
   * binarized data + metadata. Non-FLOAT32 fields go straight to the delegate.
   */
  @Override
  public void mergeOneField(FieldInfo fieldInfo, MergeState mergeState) throws IOException {
    if (fieldInfo.getVectorEncoding().equals(VectorEncoding.FLOAT32)) {
      final float[] centroid;
      final float[] mergedCentroid = new float[fieldInfo.getVectorDimension()];
      int vectorCount = mergeAndRecalculateCentroids(mergeState, fieldInfo, mergedCentroid);
      // Don't need access to the random vectors, we can just use the merged
      rawVectorDelegate.mergeOneField(fieldInfo, mergeState);
      centroid = mergedCentroid;
      if (segmentWriteState.infoStream.isEnabled(BINARIZED_VECTOR_COMPONENT)) {
        segmentWriteState.infoStream.message(
            BINARIZED_VECTOR_COMPONENT, "Vectors' count:" + vectorCount);
      }
      FloatVectorValues floatVectorValues =
          MergedVectorValues.mergeFloatVectorValues(fieldInfo, mergeState);
      if (fieldInfo.getVectorSimilarityFunction() == COSINE) {
        floatVectorValues = new NormalizedFloatVectorValues(floatVectorValues);
      }
      BinarizedFloatVectorValues binarizedVectorValues =
          new BinarizedFloatVectorValues(
              floatVectorValues,
              new OptimizedScalarQuantizer(fieldInfo.getVectorSimilarityFunction()),
              centroid);
      long vectorDataOffset = binarizedVectorData.alignFilePointer(Float.BYTES);
      DocsWithFieldSet docsWithField =
          writeBinarizedVectorData(binarizedVectorData, binarizedVectorValues);
      long vectorDataLength = binarizedVectorData.getFilePointer() - vectorDataOffset;
      float centroidDp =
          docsWithField.cardinality() > 0 ? VectorUtil.dotProduct(centroid, centroid) : 0;
      writeMeta(
          fieldInfo,
          segmentWriteState.segmentInfo.maxDoc(),
          vectorDataOffset,
          vectorDataLength,
          centroid,
          centroidDp,
          docsWithField);
    } else {
      rawVectorDelegate.mergeOneField(fieldInfo, mergeState);
    }
  }

  /**
   * Writes two parallel representations for each vector: the 1-bit index form to
   * {@code binarizedVectorData} and the {@code QUERY_BITS}-bit (4-bit, half-byte transposed) query
   * form to {@code binarizedQueryData}, each followed by its own corrective terms.
   *
   * @return the set of docs that had a vector value
   */
  static DocsWithFieldSet writeBinarizedVectorAndQueryData(
      IndexOutput binarizedVectorData,
      IndexOutput binarizedQueryData,
      FloatVectorValues floatVectorValues,
      float[] centroid,
      OptimizedScalarQuantizer binaryQuantizer)
      throws IOException {
    int discretizedDimension = discretize(floatVectorValues.dimension(), 64);
    DocsWithFieldSet docsWithField = new DocsWithFieldSet();
    // Scratch rows: [0] = index quantization, [1] = query quantization.
    byte[][] quantizationScratch = new byte[2][floatVectorValues.dimension()];
    byte[] toIndex = new byte[discretizedDimension / 8];
    byte[] toQuery = new byte[(discretizedDimension / 8) * QUERY_BITS];
    KnnVectorValues.DocIndexIterator iterator = floatVectorValues.iterator();
    for (int docV = iterator.nextDoc(); docV != NO_MORE_DOCS; docV = iterator.nextDoc()) {
      // write index vector
      OptimizedScalarQuantizer.QuantizationResult[] r =
          binaryQuantizer.multiScalarQuantize(
              floatVectorValues.vectorValue(iterator.index()),
              quantizationScratch,
              new byte[] {INDEX_BITS, QUERY_BITS},
              centroid);
      // pack and store document bit vector
      packAsBinary(quantizationScratch[0], toIndex);
      binarizedVectorData.writeBytes(toIndex, toIndex.length);
      binarizedVectorData.writeInt(Float.floatToIntBits(r[0].lowerInterval()));
      binarizedVectorData.writeInt(Float.floatToIntBits(r[0].upperInterval()));
      binarizedVectorData.writeInt(Float.floatToIntBits(r[0].additionalCorrection()));
      assert r[0].quantizedComponentSum() >= 0 && r[0].quantizedComponentSum() <= 0xffff;
      binarizedVectorData.writeShort((short) r[0].quantizedComponentSum());
      docsWithField.add(docV);

      // pack and store the 4bit query vector
      transposeHalfByte(quantizationScratch[1], toQuery);
      binarizedQueryData.writeBytes(toQuery, toQuery.length);
      binarizedQueryData.writeInt(Float.floatToIntBits(r[1].lowerInterval()));
      binarizedQueryData.writeInt(Float.floatToIntBits(r[1].upperInterval()));
      binarizedQueryData.writeInt(Float.floatToIntBits(r[1].additionalCorrection()));
      assert r[1].quantizedComponentSum() >= 0 && r[1].quantizedComponentSum() <= 0xffff;
      binarizedQueryData.writeShort((short) r[1].quantizedComponentSum());
    }
    return docsWithField;
  }

  /**
   * Copies already-binarized vectors (bits + corrective terms) from {@code
   * binarizedByteVectorValues} to {@code output}, returning the set of docs with a value.
   */
  static DocsWithFieldSet writeBinarizedVectorData(
      IndexOutput output, BinarizedByteVectorValues binarizedByteVectorValues) throws IOException {
    DocsWithFieldSet docsWithField = new DocsWithFieldSet();
    KnnVectorValues.DocIndexIterator iterator = binarizedByteVectorValues.iterator();
    for (int docV = iterator.nextDoc(); docV != NO_MORE_DOCS; docV = iterator.nextDoc()) {
      // write vector
      byte[] binaryValue = binarizedByteVectorValues.vectorValue(iterator.index());
      output.writeBytes(binaryValue, binaryValue.length);
      OptimizedScalarQuantizer.QuantizationResult corrections =
          binarizedByteVectorValues.getCorrectiveTerms(iterator.index());
      output.writeInt(Float.floatToIntBits(corrections.lowerInterval()));
      output.writeInt(Float.floatToIntBits(corrections.upperInterval()));
      output.writeInt(Float.floatToIntBits(corrections.additionalCorrection()));
      assert corrections.quantizedComponentSum() >= 0
          && corrections.quantizedComponentSum() <= 0xffff;
      output.writeShort((short) corrections.quantizedComponentSum());
      docsWithField.add(docV);
    }
    return docsWithField;
  }

  /**
   * Merge path that also returns a scorer supplier for graph building; computes the centroid then
   * delegates to the private overload. Non-FLOAT32 fields fall through to the delegate.
   */
  @Override
  public CloseableRandomVectorScorerSupplier mergeOneFieldToIndex(
      FieldInfo fieldInfo, MergeState mergeState) throws IOException {
    if (fieldInfo.getVectorEncoding().equals(VectorEncoding.FLOAT32)) {
      final float[] centroid;
      final float cDotC;
      final float[] mergedCentroid = new float[fieldInfo.getVectorDimension()];
      int vectorCount = mergeAndRecalculateCentroids(mergeState, fieldInfo, mergedCentroid);
      // Don't need access to the random vectors, we can just use the merged
      rawVectorDelegate.mergeOneField(fieldInfo, mergeState);
      centroid = mergedCentroid;
      cDotC = vectorCount > 0 ? VectorUtil.dotProduct(centroid, centroid) : 0;
      if (segmentWriteState.infoStream.isEnabled(BINARIZED_VECTOR_COMPONENT)) {
        segmentWriteState.infoStream.message(
            BINARIZED_VECTOR_COMPONENT, "Vectors' count:" + vectorCount);
      }
      return mergeOneFieldToIndex(segmentWriteState, fieldInfo, mergeState, centroid, cDotC);
    }
    return rawVectorDelegate.mergeOneFieldToIndex(fieldInfo, mergeState);
  }

  /**
   * Writes index- and query-quantized vectors to temp files, copies the index data into the real
   * data file, writes metadata, and returns a scorer supplier backed by the (still open) temp
   * inputs; its close hook closes them and deletes the temp files. On failure, all opened
   * outputs/inputs are closed and temp files deleted before rethrowing.
   */
  private CloseableRandomVectorScorerSupplier mergeOneFieldToIndex(
      SegmentWriteState segmentWriteState,
      FieldInfo fieldInfo,
      MergeState mergeState,
      float[] centroid,
      float cDotC)
      throws IOException {
    long vectorDataOffset = binarizedVectorData.alignFilePointer(Float.BYTES);
    IndexOutput tempQuantizedVectorData = null;
    IndexOutput tempScoreQuantizedVectorData = null;
    IndexInput binarizedDataInput = null;
    IndexInput binarizedScoreDataInput = null;
    OptimizedScalarQuantizer quantizer =
        new OptimizedScalarQuantizer(fieldInfo.getVectorSimilarityFunction());
    try {
      tempQuantizedVectorData =
          segmentWriteState.directory.createTempOutput(
              binarizedVectorData.getName(), "temp", segmentWriteState.context);
      tempScoreQuantizedVectorData =
          segmentWriteState.directory.createTempOutput(
              binarizedVectorData.getName(), "score_temp", segmentWriteState.context);
      final String tempQuantizedVectorName = tempQuantizedVectorData.getName();
      final String tempScoreQuantizedVectorName = tempScoreQuantizedVectorData.getName();
      FloatVectorValues floatVectorValues =
          MergedVectorValues.mergeFloatVectorValues(fieldInfo, mergeState);
      if (fieldInfo.getVectorSimilarityFunction() == COSINE) {
        floatVectorValues = new NormalizedFloatVectorValues(floatVectorValues);
      }
      DocsWithFieldSet docsWithField =
          writeBinarizedVectorAndQueryData(
              tempQuantizedVectorData,
              tempScoreQuantizedVectorData,
              floatVectorValues,
              centroid,
              quantizer);
      CodecUtil.writeFooter(tempQuantizedVectorData);
      IOUtils.close(tempQuantizedVectorData);
      binarizedDataInput =
          segmentWriteState.directory.openInput(tempQuantizedVectorName, segmentWriteState.context);
      // Copy everything except the footer into the real data file, then verify the temp checksum.
      binarizedVectorData.copyBytes(
          binarizedDataInput, binarizedDataInput.length() - CodecUtil.footerLength());
      long vectorDataLength = binarizedVectorData.getFilePointer() - vectorDataOffset;
      CodecUtil.retrieveChecksum(binarizedDataInput);
      CodecUtil.writeFooter(tempScoreQuantizedVectorData);
      IOUtils.close(tempScoreQuantizedVectorData);
      binarizedScoreDataInput =
          segmentWriteState.directory.openInput(
              tempScoreQuantizedVectorName, segmentWriteState.context);
      writeMeta(
          fieldInfo,
          segmentWriteState.segmentInfo.maxDoc(),
          vectorDataOffset,
          vectorDataLength,
          centroid,
          cDotC,
          docsWithField);
      // From here on, ownership of the inputs moves to the returned supplier's close hook;
      // null the locals so the catch block below does not double-close them.
      final IndexInput finalBinarizedDataInput = binarizedDataInput;
      final IndexInput finalBinarizedScoreDataInput = binarizedScoreDataInput;
      tempQuantizedVectorData = null;
      tempScoreQuantizedVectorData = null;
      binarizedDataInput = null;
      binarizedScoreDataInput = null;
      OffHeapBinarizedVectorValues vectorValues =
          new OffHeapBinarizedVectorValues.DenseOffHeapVectorValues(
              fieldInfo.getVectorDimension(),
              docsWithField.cardinality(),
              centroid,
              cDotC,
              quantizer,
              fieldInfo.getVectorSimilarityFunction(),
              vectorsScorer,
              finalBinarizedDataInput);
      RandomVectorScorerSupplier scorerSupplier =
          vectorsScorer.getRandomVectorScorerSupplier(
              fieldInfo.getVectorSimilarityFunction(),
              new OffHeapBinarizedQueryVectorValues(
                  finalBinarizedScoreDataInput,
                  fieldInfo.getVectorDimension(),
                  docsWithField.cardinality()),
              vectorValues);
      return new BinarizedCloseableRandomVectorScorerSupplier(
          scorerSupplier,
          vectorValues,
          () -> {
            IOUtils.close(finalBinarizedDataInput, finalBinarizedScoreDataInput);
            IOUtils.deleteFilesIgnoringExceptions(
                segmentWriteState.directory,
                tempQuantizedVectorName,
                tempScoreQuantizedVectorName);
          });
    } catch (Throwable t) {
      IOUtils.closeWhileSuppressingExceptions(
          t,
          tempQuantizedVectorData,
          tempScoreQuantizedVectorData,
          binarizedDataInput,
          binarizedScoreDataInput);
      if (tempQuantizedVectorData != null) {
        IOUtils.deleteFilesSuppressingExceptions(
            t, segmentWriteState.directory, tempQuantizedVectorData.getName());
      }
      if (tempScoreQuantizedVectorData != null) {
        IOUtils.deleteFilesSuppressingExceptions(
            t, segmentWriteState.directory, tempScoreQuantizedVectorData.getName());
      }
      throw t;
    }
  }

  @Override
  public void close() throws IOException {
    IOUtils.close(meta, binarizedVectorData, rawVectorDelegate);
  }

  /**
   * Returns the stored centroid for {@code fieldName} if the (unwrapped) reader is a
   * {@link Lucene102BinaryQuantizedVectorsReader}; otherwise {@code null}.
   */
  static float[] getCentroid(KnnVectorsReader vectorsReader, String fieldName) {
    vectorsReader = vectorsReader.unwrapReaderForField(fieldName);
    if (vectorsReader instanceof Lucene102BinaryQuantizedVectorsReader reader) {
      return reader.getCentroid(fieldName);
    }
    return null;
  }

  /**
   * Computes the merged centroid into {@code mergedCentroid} as the vector-count-weighted average
   * of per-segment centroids; falls back to a full recalculation when any contributing segment
   * lacks a stored centroid or has deleted docs.
   *
   * @return the total number of vectors across all segments
   */
  static int mergeAndRecalculateCentroids(
      MergeState mergeState, FieldInfo fieldInfo, float[] mergedCentroid) throws IOException {
    boolean recalculate = false;
    int totalVectorCount = 0;
    for (int i = 0; i < mergeState.knnVectorsReaders.length; i++) {
      KnnVectorsReader knnVectorsReader = mergeState.knnVectorsReaders[i];
      if (knnVectorsReader == null
          || knnVectorsReader.getFloatVectorValues(fieldInfo.name) == null) {
        continue;
      }
      float[] centroid = getCentroid(knnVectorsReader, fieldInfo.name);
      int vectorCount = knnVectorsReader.getFloatVectorValues(fieldInfo.name).size();
      if (vectorCount == 0) {
        continue;
      }
      totalVectorCount += vectorCount;
      // If there aren't centroids, or previously clustered with more than one cluster
      // or if there are deleted docs, we must recalculate the centroid
      if (centroid == null || mergeState.liveDocs[i] != null) {
        recalculate = true;
        break;
      }
      for (int j = 0; j < centroid.length; j++) {
        mergedCentroid[j] += centroid[j] * vectorCount;
      }
    }
    if (recalculate) {
      return calculateCentroid(mergeState, fieldInfo, mergedCentroid);
    } else {
      for (int j = 0; j < mergedCentroid.length; j++) {
        mergedCentroid[j] = mergedCentroid[j] / totalVectorCount;
      }
      if (fieldInfo.getVectorSimilarityFunction() == COSINE) {
        VectorUtil.l2normalize(mergedCentroid);
      }
      return totalVectorCount;
    }
  }

  /**
   * Recomputes the centroid by iterating every live vector across all segments; result is
   * l2-normalized for cosine similarity.
   *
   * @return the number of vectors visited (0 leaves {@code centroid} zeroed)
   */
  static int calculateCentroid(MergeState mergeState, FieldInfo fieldInfo, float[] centroid)
      throws IOException {
    assert fieldInfo.getVectorEncoding().equals(VectorEncoding.FLOAT32);
    // clear out the centroid
    Arrays.fill(centroid, 0);
    int count = 0;
    for (int i = 0; i < mergeState.knnVectorsReaders.length; i++) {
      KnnVectorsReader knnVectorsReader = mergeState.knnVectorsReaders[i];
      if (knnVectorsReader == null) continue;
      FloatVectorValues vectorValues =
          mergeState.knnVectorsReaders[i].getFloatVectorValues(fieldInfo.name);
      if (vectorValues == null) {
        continue;
      }
      KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator();
      for (int doc = iterator.nextDoc();
          doc != DocIdSetIterator.NO_MORE_DOCS;
          doc = iterator.nextDoc()) {
        ++count;
        float[] vector = vectorValues.vectorValue(iterator.index());
        for (int j = 0; j < vector.length; j++) {
          centroid[j] += vector[j];
        }
      }
    }
    if (count == 0) {
      return count;
    }
    for (int i = 0; i < centroid.length; i++) {
      centroid[i] /= count;
    }
    if (fieldInfo.getVectorSimilarityFunction() == COSINE) {
      VectorUtil.l2normalize(centroid);
    }
    return count;
  }

  @Override
  public long ramBytesUsed() {
    long total = SHALLOW_RAM_BYTES_USED;
    for (FieldWriter field : fields) {
      // the field tracks the delegate field usage
      total += field.ramBytesUsed();
    }
    return total;
  }

  /**
   * Per-field buffer: delegates raw-vector storage to {@code flatFieldVectorsWriter} while
   * accumulating the running component sums (normalized components for cosine) used to derive the
   * centroid at flush time, plus per-vector magnitudes needed to normalize in place later.
   */
  static class FieldWriter extends FlatFieldVectorsWriter<float[]> {
    private static final long SHALLOW_SIZE = shallowSizeOfInstance(FieldWriter.class);
    private final FieldInfo fieldInfo;
    private boolean finished;
    private final FlatFieldVectorsWriter<float[]> flatFieldVectorsWriter;
    // Component-wise sums over all added vectors; divided by count to form the centroid.
    private final float[] dimensionSums;
    // Per-vector L2 magnitudes, recorded only for cosine similarity (see addValue).
    private final FloatArrayList magnitudes = new FloatArrayList();

    FieldWriter(FieldInfo fieldInfo, FlatFieldVectorsWriter<float[]> flatFieldVectorsWriter) {
      this.fieldInfo = fieldInfo;
      this.flatFieldVectorsWriter = flatFieldVectorsWriter;
      this.dimensionSums = new float[fieldInfo.getVectorDimension()];
    }

    @Override
    public List<float[]> getVectors() {
      return flatFieldVectorsWriter.getVectors();
    }

    /** Divides each buffered vector by its recorded magnitude, in place (cosine path only). */
    public void normalizeVectors() {
      for (int i = 0; i < flatFieldVectorsWriter.getVectors().size(); i++) {
        float[] vector = flatFieldVectorsWriter.getVectors().get(i);
        float magnitude = magnitudes.get(i);
        for (int j = 0; j < vector.length; j++) {
          vector[j] /= magnitude;
        }
      }
    }

    @Override
    public DocsWithFieldSet getDocsWithFieldSet() {
      return flatFieldVectorsWriter.getDocsWithFieldSet();
    }

    @Override
    public void finish() throws IOException {
      if (finished) {
        return;
      }
      assert flatFieldVectorsWriter.isFinished();
      finished = true;
    }

    @Override
    public boolean isFinished() {
      return finished && flatFieldVectorsWriter.isFinished();
    }

    @Override
    public void addValue(int docID, float[] vectorValue) throws IOException {
      flatFieldVectorsWriter.addValue(docID, vectorValue);
      if (fieldInfo.getVectorSimilarityFunction() == COSINE) {
        // Accumulate normalized components and remember the magnitude for normalizeVectors().
        float dp = VectorUtil.dotProduct(vectorValue, vectorValue);
        float divisor = (float) Math.sqrt(dp);
        magnitudes.add(divisor);
        for (int i = 0; i < vectorValue.length; i++) {
          dimensionSums[i] += (vectorValue[i] / divisor);
        }
      } else {
        for (int i = 0; i < vectorValue.length; i++) {
          dimensionSums[i] += vectorValue[i];
        }
      }
    }

    @Override
    public float[] copyValue(float[] vectorValue) {
      throw new UnsupportedOperationException();
    }

    @Override
    public long ramBytesUsed() {
      long size = SHALLOW_SIZE;
      size += flatFieldVectorsWriter.ramBytesUsed();
      size += magnitudes.ramBytesUsed();
      return size;
    }
  }

  // When accessing the vectorValue method, targetOrd here means a row ordinal.
  /**
   * Random access to query-quantized (4-bit, half-byte transposed) vectors stored off heap.
   * Each record is {@code (discretize(dim,64)/8)*QUERY_BITS} packed bytes, followed by three
   * correction floats and an unsigned-short quantized component sum. Caches the last-read ordinal.
   * Not thread-safe: reads share one scratch buffer.
   */
  static class OffHeapBinarizedQueryVectorValues {
    private final IndexInput slice;
    private final int dimension;
    private final int size;
    protected final byte[] binaryValue;
    protected final ByteBuffer byteBuffer;
    // Total on-disk bytes per record: packed bits + 3 floats + 1 short.
    private final int byteSize;
    protected final float[] correctiveValues;
    private int lastOrd = -1;
    private int quantizedComponentSum;

    OffHeapBinarizedQueryVectorValues(IndexInput data, int dimension, int size) {
      this.slice = data;
      this.dimension = dimension;
      this.size = size;
      // 4x the quantized binary dimensions
      int binaryDimensions = (discretize(dimension, 64) / 8) * QUERY_BITS;
      this.byteBuffer = ByteBuffer.allocate(binaryDimensions);
      this.binaryValue = byteBuffer.array();
      // Three correction floats; the quantized component sum is held separately as a short.
      this.correctiveValues = new float[3];
      this.byteSize = binaryDimensions + Float.BYTES * 3 + Short.BYTES;
    }

    /**
     * Returns the corrective terms for {@code targetOrd}, reading the record first (via
     * {@link #vectorValue(int)}) unless it is already cached.
     */
    public OptimizedScalarQuantizer.QuantizationResult getCorrectiveTerms(int targetOrd)
        throws IOException {
      if (lastOrd == targetOrd) {
        return new OptimizedScalarQuantizer.QuantizationResult(
            correctiveValues[0], correctiveValues[1], correctiveValues[2], quantizedComponentSum);
      }
      vectorValue(targetOrd);
      return new OptimizedScalarQuantizer.QuantizationResult(
          correctiveValues[0], correctiveValues[1], correctiveValues[2], quantizedComponentSum);
    }

    /** Number of vectors. */
    public int size() {
      return size;
    }

    /** Packed byte length of one quantized vector. */
    public int quantizedLength() {
      return binaryValue.length;
    }

    public int dimension() {
      return dimension;
    }

    /** Independent reader over the same data (cloned slice, fresh cache/scratch). */
    public OffHeapBinarizedQueryVectorValues copy() throws IOException {
      return new OffHeapBinarizedQueryVectorValues(slice.clone(), dimension, size);
    }

    public IndexInput getSlice() {
      return slice;
    }

    /**
     * Reads (or returns cached) packed quantized bytes for {@code targetOrd}; also refreshes the
     * cached corrective floats and component sum as a side effect.
     */
    public byte[] vectorValue(int targetOrd) throws IOException {
      if (lastOrd == targetOrd) {
        return binaryValue;
      }
      slice.seek((long) targetOrd * byteSize);
      slice.readBytes(binaryValue, 0, binaryValue.length);
      slice.readFloats(correctiveValues, 0, 3);
      // Stored as a short on disk; widen back to the unsigned range.
      quantizedComponentSum = Short.toUnsignedInt(slice.readShort());
      lastOrd = targetOrd;
      return binaryValue;
    }
  }

  /**
   * Lazily binarizes a {@link FloatVectorValues} on access: each requested ordinal is 1-bit
   * quantized against {@code centroid} and bit-packed. Caches the last ordinal's result;
   * {@link #getCorrectiveTerms(int)} is only valid for that cached ordinal.
   */
  static class BinarizedFloatVectorValues extends BinarizedByteVectorValues {
    private OptimizedScalarQuantizer.QuantizationResult corrections;
    private final byte[] binarized;
    private final byte[] initQuantized;
    private final float[] centroid;
    private final FloatVectorValues values;
    private final OptimizedScalarQuantizer quantizer;

    private int lastOrd = -1;

    BinarizedFloatVectorValues(
        FloatVectorValues delegate, OptimizedScalarQuantizer quantizer, float[] centroid) {
      this.values = delegate;
      this.quantizer = quantizer;
      this.binarized = new byte[discretize(delegate.dimension(), 64) / 8];
      this.initQuantized = new byte[delegate.dimension()];
      this.centroid = centroid;
    }

    @Override
    public OptimizedScalarQuantizer.QuantizationResult getCorrectiveTerms(int ord) {
      // Corrections are only kept for the most recently binarized ordinal.
      if (ord != lastOrd) {
        throw new IllegalStateException(
            "attempt to retrieve corrective terms for different ord "
                + ord
                + " than the quantization was done for: "
                + lastOrd);
      }
      return corrections;
    }

    @Override
    public byte[] vectorValue(int ord) throws IOException {
      if (ord != lastOrd) {
        binarize(ord);
        lastOrd = ord;
      }
      return binarized;
    }

    @Override
    public int dimension() {
      return values.dimension();
    }

    @Override
    public OptimizedScalarQuantizer getQuantizer() {
      throw new UnsupportedOperationException();
    }

    @Override
    public float[] getCentroid() throws IOException {
      return centroid;
    }

    @Override
    public int size() {
      return values.size();
    }

    @Override
    public VectorScorer scorer(float[] target) throws IOException {
      throw new UnsupportedOperationException();
    }

    @Override
    public BinarizedByteVectorValues copy() throws IOException {
      return new BinarizedFloatVectorValues(values.copy(), quantizer, centroid);
    }

    /** Quantizes ordinal {@code ord} into {@code binarized}, updating {@code corrections}. */
    private void binarize(int ord) throws IOException {
      corrections =
          quantizer.scalarQuantize(values.vectorValue(ord), initQuantized, INDEX_BITS, centroid);
      packAsBinary(initQuantized, binarized);
    }

    @Override
    public DocIndexIterator iterator() {
      return values.iterator();
    }

    @Override
    public int ordToDoc(int ord) {
      return values.ordToDoc(ord);
    }
  }
static class BinarizedCloseableRandomVectorScorerSupplier implements CloseableRandomVectorScorerSupplier { private final RandomVectorScorerSupplier supplier; private final KnnVectorValues vectorValues; private final Closeable onClose; BinarizedCloseableRandomVectorScorerSupplier( RandomVectorScorerSupplier supplier, KnnVectorValues vectorValues, Closeable onClose) { this.supplier = supplier; this.onClose = onClose; this.vectorValues = vectorValues; } @Override public UpdateableRandomVectorScorer scorer() throws IOException { return supplier.scorer(); } @Override public RandomVectorScorerSupplier copy() throws IOException { return supplier.copy(); } @Override public void close() throws IOException { onClose.close(); } @Override public int totalVectorCount() { return vectorValues.size(); } } static final class NormalizedFloatVectorValues extends FloatVectorValues { private final FloatVectorValues values; private final float[] normalizedVector; NormalizedFloatVectorValues(FloatVectorValues values) { this.values = values; this.normalizedVector = new float[values.dimension()]; } @Override public int dimension() { return values.dimension(); } @Override public int size() { return values.size(); } @Override public int ordToDoc(int ord) { return values.ordToDoc(ord); } @Override public float[] vectorValue(int ord) throws IOException { System.arraycopy(values.vectorValue(ord), 0, normalizedVector, 0, normalizedVector.length); VectorUtil.l2normalize(normalizedVector); return normalizedVector; } @Override public DocIndexIterator iterator() { return values.iterator(); } @Override public NormalizedFloatVectorValues copy() throws IOException { return new NormalizedFloatVectorValues(values.copy()); } } }
googleapis/google-api-java-client-services
37,518
clients/google-api-services-content/v2.1/2.0.0/com/google/api/services/content/model/Metrics.java
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
/*
 * This code was generated by https://github.com/googleapis/google-api-java-client-services/
 * Modify at your own risk.
 */

package com.google.api.services.content.model;

/**
 * Performance metrics. Values are only set for metrics requested explicitly in the request's search
 * query.
 *
 * <p>This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Content API for Shopping. For a detailed explanation
 * see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * <p>Monetary "micros" fields are expressed in 1 millionth of a standard currency unit
 * (1 USD = 1000000 micros). For every such metric, the currency of the returned value is stored in
 * the currency_code segment; selecting the metric automatically adds 'segments.currency_code' to
 * the SELECT clause of the search query (unless explicitly selected by the user) and populates the
 * currency_code segment in the response.</p>
 *
 * <p>Fields marked *Deprecated* are no longer supported and retrieving them returns 0 starting
 * from May 2024.</p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class Metrics extends com.google.api.client.json.GenericJson {

  /**
   * *Deprecated*: This field is no longer supported and retrieving it returns 0 starting from May
   * 2024. Average order size - the average number of items in an order. **This metric cannot be
   * segmented by product dimensions and customer_country_code.**
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Double aos;

  /**
   * *Deprecated*: This field is no longer supported and retrieving it returns 0 starting from May
   * 2024. Average order value in micros - the average value (total price of items) of all placed
   * orders. See the class note on "micros" fields and the currency_code segment. **This metric
   * cannot be segmented by product dimensions and customer_country_code.**
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Double aovMicros;

  /**
   * Number of clicks.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  @com.google.api.client.json.JsonString
  private java.lang.Long clicks;

  /**
   * Number of conversions divided by the number of clicks, reported on the impression date. The
   * metric is currently available only for the `FREE_PRODUCT_LISTING` program.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Double conversionRate;

  /**
   * Value of conversions in micros attributed to the product, reported on the conversion date. The
   * metric is currently available only for the `FREE_PRODUCT_LISTING` program. See the class note
   * on "micros" fields and the currency_code segment.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  @com.google.api.client.json.JsonString
  private java.lang.Long conversionValueMicros;

  /**
   * Number of conversions attributed to the product, reported on the conversion date. Depending on
   * the attribution model, a conversion might be distributed across multiple clicks, where each
   * click gets its own credit assigned. This metric is a sum of all such credits. The metric is
   * currently available only for the `FREE_PRODUCT_LISTING` program.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Double conversions;

  /**
   * Click-through rate - the number of clicks merchant's products receive (clicks) divided by the
   * number of times the products are shown (impressions).
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Double ctr;

  /**
   * *Deprecated*: This field is no longer supported and retrieving it returns 0 starting from May
   * 2024. Average number of days between an order being placed and the order being fully shipped,
   * reported on the last shipment date. **This metric cannot be segmented by product dimensions and
   * customer_country_code.**
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Double daysToShip;

  /**
   * Number of times merchant's products are shown.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  @com.google.api.client.json.JsonString
  private java.lang.Long impressions;

  /**
   * *Deprecated*: This field is no longer supported and retrieving it returns 0 starting from May
   * 2024. Average number of days between an item being ordered and the item being shipped
   * (the upstream description is truncated; "shipped" is the presumed ending — confirm against the
   * Content API reference). **This metric cannot be segmented by customer_country_code.**
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Double itemDaysToShip;

  /**
   * *Deprecated*: This field is no longer supported and retrieving it returns 0 starting from May
   * 2024. Percentage of shipped items in relation to all finalized items (shipped or rejected by
   * the merchant; unshipped items are not taken into account), reported on the order date. Item
   * fill rate is lowered by merchant rejections. **This metric cannot be segmented by
   * customer_country_code.**
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Double itemFillRate;

  /**
   * *Deprecated*: This field is no longer supported and retrieving it returns 0 starting from May
   * 2024. Total price of ordered items in micros. Excludes shipping, taxes (US only), and customer
   * cancellations that happened within 30 minutes of placing the order. See the class note on
   * "micros" fields and the currency_code segment. **This metric cannot be segmented by
   * customer_country_code.**
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  @com.google.api.client.json.JsonString
  private java.lang.Long orderedItemSalesMicros;

  /**
   * *Deprecated*: This field is no longer supported and retrieving it returns 0 starting from May
   * 2024. Number of ordered items. Excludes customer cancellations that happened within 30 minutes
   * of placing the order. **This metric cannot be segmented by customer_country_code.**
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  @com.google.api.client.json.JsonString
  private java.lang.Long orderedItems;

  /**
   * *Deprecated*: This field is no longer supported and retrieving it returns 0 starting from May
   * 2024. Number of placed orders. Excludes customer cancellations that happened within 30 minutes
   * of placing the order. **This metric cannot be segmented by product dimensions and
   * customer_country_code.**
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  @com.google.api.client.json.JsonString
  private java.lang.Long orders;

  /**
   * *Deprecated*: This field is no longer supported and retrieving it returns 0 starting from May
   * 2024. Number of ordered items canceled by the merchant, reported on the order date. **This
   * metric cannot be segmented by customer_country_code.**
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  @com.google.api.client.json.JsonString
  private java.lang.Long rejectedItems;

  /**
   * *Deprecated*: This field is no longer supported and retrieving it returns 0 starting from May
   * 2024. Total price of returned items divided by the total price of shipped items, reported on
   * the order date. Selecting this metric automatically adds 'segments.currency_code' to the
   * SELECT clause (unless explicitly selected) and populates the currency_code segment in the
   * response. **This metric cannot be segmented by customer_country_code.**
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Double returnRate;

  /**
   * *Deprecated*: This field is no longer supported and retrieving it returns 0 starting from May
   * 2024. Number of ordered items sent back for return, reported on the date when the merchant
   * accepted the return. **This metric cannot be segmented by customer_country_code.**
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  @com.google.api.client.json.JsonString
  private java.lang.Long returnedItems;

  /**
   * *Deprecated*: This field is no longer supported and retrieving it returns 0 starting from May
   * 2024. Total price of ordered items sent back for return in micros, reported on the date when
   * the merchant accepted the return. See the class note on "micros" fields and the currency_code
   * segment. **This metric cannot be segmented by customer_country_code.**
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  @com.google.api.client.json.JsonString
  private java.lang.Long returnsMicros;

  /**
   * *Deprecated*: This field is no longer supported and retrieving it returns 0 starting from May
   * 2024. Total price of shipped items in micros, reported on the order date. Excludes shipping and
   * taxes (US only). See the class note on "micros" fields and the currency_code segment. **This
   * metric cannot be segmented by customer_country_code.**
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  @com.google.api.client.json.JsonString
  private java.lang.Long shippedItemSalesMicros;

  /**
   * *Deprecated*: This field is no longer supported and retrieving it returns 0 starting from May
   * 2024. Number of shipped items, reported on the shipment date. **This metric cannot be segmented
   * by customer_country_code.**
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  @com.google.api.client.json.JsonString
  private java.lang.Long shippedItems;

  /**
   * *Deprecated*: This field is no longer supported and retrieving it returns 0 starting from May
   * 2024. Number of fully shipped orders, reported on the last shipment date. **This metric cannot
   * be segmented by product dimensions and customer_country_code.**
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  @com.google.api.client.json.JsonString
  private java.lang.Long shippedOrders;

  /**
   * *Deprecated*: This field is no longer supported and retrieving it returns 0 starting from May
   * 2024. Number of ordered items not shipped up until the end of the queried day. If a multi-day
   * period is specified in the search query, the returned value is the average number of unshipped
   * items over the days in the queried period. **This metric cannot be segmented by
   * customer_country_code.**
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Double unshippedItems;

  /**
   * *Deprecated*: This field is no longer supported and retrieving it returns 0 starting from May
   * 2024. Number of orders not shipped or partially shipped up until the end of the queried day. If
   * a multi-day period is specified in the search query, the returned value is the average number
   * of unshipped orders over the days in the queried period. **This metric cannot be segmented by
   * product dimensions and customer_country_code.**
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Double unshippedOrders;

  /**
   * Average order size (deprecated; returns 0 starting from May 2024). See {@link #aos}.
   * @return value or {@code null} for none
   */
  public java.lang.Double getAos() {
    return aos;
  }

  /**
   * Average order size (deprecated; returns 0 starting from May 2024). See {@link #aos}.
   * @param aos aos or {@code null} for none
   */
  public Metrics setAos(java.lang.Double aos) {
    this.aos = aos;
    return this;
  }

  /**
   * Average order value in micros (deprecated; returns 0 starting from May 2024). See
   * {@link #aovMicros}.
   * @return value or {@code null} for none
   */
  public java.lang.Double getAovMicros() {
    return aovMicros;
  }

  /**
   * Average order value in micros (deprecated; returns 0 starting from May 2024). See
   * {@link #aovMicros}.
   * @param aovMicros aovMicros or {@code null} for none
   */
  public Metrics setAovMicros(java.lang.Double aovMicros) {
    this.aovMicros = aovMicros;
    return this;
  }

  /**
   * Number of clicks.
   * @return value or {@code null} for none
   */
  public java.lang.Long getClicks() {
    return clicks;
  }

  /**
   * Number of clicks.
   * @param clicks clicks or {@code null} for none
   */
  public Metrics setClicks(java.lang.Long clicks) {
    this.clicks = clicks;
    return this;
  }

  /**
   * Number of conversions divided by the number of clicks, reported on the impression date. The
   * metric is currently available only for the `FREE_PRODUCT_LISTING` program.
   * @return value or {@code null} for none
   */
  public java.lang.Double getConversionRate() {
    return conversionRate;
  }

  /**
   * Number of conversions divided by the number of clicks, reported on the impression date. The
   * metric is currently available only for the `FREE_PRODUCT_LISTING` program.
   * @param conversionRate conversionRate or {@code null} for none
   */
  public Metrics setConversionRate(java.lang.Double conversionRate) {
    this.conversionRate = conversionRate;
    return this;
  }

  /**
   * Value of conversions in micros attributed to the product, reported on the conversion date.
   * See {@link #conversionValueMicros}.
   * @return value or {@code null} for none
   */
  public java.lang.Long getConversionValueMicros() {
    return conversionValueMicros;
  }

  /**
   * Value of conversions in micros attributed to the product, reported on the conversion date.
   * See {@link #conversionValueMicros}.
   * @param conversionValueMicros conversionValueMicros or {@code null} for none
   */
  public Metrics setConversionValueMicros(java.lang.Long conversionValueMicros) {
    this.conversionValueMicros = conversionValueMicros;
    return this;
  }

  /**
   * Number of conversions attributed to the product, reported on the conversion date. See
   * {@link #conversions}.
   * @return value or {@code null} for none
   */
  public java.lang.Double getConversions() {
    return conversions;
  }

  /**
   * Number of conversions attributed to the product, reported on the conversion date. See
   * {@link #conversions}.
   * @param conversions conversions or {@code null} for none
   */
  public Metrics setConversions(java.lang.Double conversions) {
    this.conversions = conversions;
    return this;
  }

  /**
   * Click-through rate - clicks divided by impressions.
   * @return value or {@code null} for none
   */
  public java.lang.Double getCtr() {
    return ctr;
  }

  /**
   * Click-through rate - clicks divided by impressions.
   * @param ctr ctr or {@code null} for none
   */
  public Metrics setCtr(java.lang.Double ctr) {
    this.ctr = ctr;
    return this;
  }

  /**
   * Average days from order placement to full shipment (deprecated; returns 0 starting from May
   * 2024). See {@link #daysToShip}.
   * @return value or {@code null} for none
   */
  public java.lang.Double getDaysToShip() {
    return daysToShip;
  }

  /**
   * Average days from order placement to full shipment (deprecated; returns 0 starting from May
   * 2024). See {@link #daysToShip}.
   * @param daysToShip daysToShip or {@code null} for none
   */
  public Metrics setDaysToShip(java.lang.Double daysToShip) {
    this.daysToShip = daysToShip;
    return this;
  }

  /**
   * Number of times merchant's products are shown.
   * @return value or {@code null} for none
   */
  public java.lang.Long getImpressions() {
    return impressions;
  }

  /**
   * Number of times merchant's products are shown.
   * @param impressions impressions or {@code null} for none
   */
  public Metrics setImpressions(java.lang.Long impressions) {
    this.impressions = impressions;
    return this;
  }

  /**
   * Average days from item order to shipment (deprecated; returns 0 starting from May 2024). See
   * {@link #itemDaysToShip}.
   * @return value or {@code null} for none
   */
  public java.lang.Double getItemDaysToShip() {
    return itemDaysToShip;
  }

  /**
   * Average days from item order to shipment (deprecated; returns 0 starting from May 2024). See
   * {@link #itemDaysToShip}.
   * @param itemDaysToShip itemDaysToShip or {@code null} for none
   */
  public Metrics setItemDaysToShip(java.lang.Double itemDaysToShip) {
    this.itemDaysToShip = itemDaysToShip;
    return this;
  }

  /**
   * Percentage of shipped items among finalized items (deprecated; returns 0 starting from May
   * 2024). See {@link #itemFillRate}.
   * @return value or {@code null} for none
   */
  public java.lang.Double getItemFillRate() {
    return itemFillRate;
  }

  /**
   * Percentage of shipped items among finalized items (deprecated; returns 0 starting from May
   * 2024). See {@link #itemFillRate}.
   * @param itemFillRate itemFillRate or {@code null} for none
   */
  public Metrics setItemFillRate(java.lang.Double itemFillRate) {
    this.itemFillRate = itemFillRate;
    return this;
  }

  /**
   * Total price of ordered items in micros (deprecated; returns 0 starting from May 2024). See
   * {@link #orderedItemSalesMicros}.
   * @return value or {@code null} for none
   */
  public java.lang.Long getOrderedItemSalesMicros() {
    return orderedItemSalesMicros;
  }

  /**
   * Total price of ordered items in micros (deprecated; returns 0 starting from May 2024). See
   * {@link #orderedItemSalesMicros}.
   * @param orderedItemSalesMicros orderedItemSalesMicros or {@code null} for none
   */
  public Metrics setOrderedItemSalesMicros(java.lang.Long orderedItemSalesMicros) {
    this.orderedItemSalesMicros = orderedItemSalesMicros;
    return this;
  }

  /**
   * Number of ordered items (deprecated; returns 0 starting from May 2024). See
   * {@link #orderedItems}.
   * @return value or {@code null} for none
   */
  public java.lang.Long getOrderedItems() {
    return orderedItems;
  }

  /**
   * Number of ordered items (deprecated; returns 0 starting from May 2024). See
   * {@link #orderedItems}.
   * @param orderedItems orderedItems or {@code null} for none
   */
  public Metrics setOrderedItems(java.lang.Long orderedItems) {
    this.orderedItems = orderedItems;
    return this;
  }

  /**
   * Number of placed orders (deprecated; returns 0 starting from May 2024). See {@link #orders}.
   * @return value or {@code null} for none
   */
  public java.lang.Long getOrders() {
    return orders;
  }

  /**
   * Number of placed orders (deprecated; returns 0 starting from May 2024). See {@link #orders}.
   * @param orders orders or {@code null} for none
   */
  public Metrics setOrders(java.lang.Long orders) {
    this.orders = orders;
    return this;
  }

  /**
   * Number of ordered items canceled by the merchant (deprecated; returns 0 starting from May
   * 2024). See {@link #rejectedItems}.
   * @return value or {@code null} for none
   */
  public java.lang.Long getRejectedItems() {
    return rejectedItems;
  }

  /**
   * Number of ordered items canceled by the merchant (deprecated; returns 0 starting from May
   * 2024). See {@link #rejectedItems}.
   * @param rejectedItems rejectedItems or {@code null} for none
   */
  public Metrics setRejectedItems(java.lang.Long rejectedItems) {
    this.rejectedItems = rejectedItems;
    return this;
  }

  /**
   * Returned-to-shipped price ratio (deprecated; returns 0 starting from May 2024). See
   * {@link #returnRate}.
   * @return value or {@code null} for none
   */
  public java.lang.Double getReturnRate() {
    return returnRate;
  }

  /**
   * Returned-to-shipped price ratio (deprecated; returns 0 starting from May 2024). See
   * {@link #returnRate}.
   * @param returnRate returnRate or {@code null} for none
   */
  public Metrics setReturnRate(java.lang.Double returnRate) {
    this.returnRate = returnRate;
    return this;
  }

  /**
   * Number of items sent back for return (deprecated; returns 0 starting from May 2024). See
   * {@link #returnedItems}.
   * @return value or {@code null} for none
   */
  public java.lang.Long getReturnedItems() {
    return returnedItems;
  }

  /**
   * Number of items sent back for return (deprecated; returns 0 starting from May 2024). See
   * {@link #returnedItems}.
   * @param returnedItems returnedItems or {@code null} for none
   */
  public Metrics setReturnedItems(java.lang.Long returnedItems) {
    this.returnedItems = returnedItems;
    return this;
  }

  /**
   * Total price of returned items in micros (deprecated; returns 0 starting from May 2024). See
   * {@link #returnsMicros}.
   * @return value or {@code null} for none
   */
  public java.lang.Long getReturnsMicros() {
    return returnsMicros;
  }

  /**
   * Total price of returned items in micros (deprecated; returns 0 starting from May 2024). See
   * {@link #returnsMicros}.
   * @param returnsMicros returnsMicros or {@code null} for none
   */
  public Metrics setReturnsMicros(java.lang.Long returnsMicros) {
    this.returnsMicros = returnsMicros;
    return this;
  }

  /**
   * Total price of shipped items in micros (deprecated; returns 0 starting from May 2024). See
   * {@link #shippedItemSalesMicros}.
   * @return value or {@code null} for none
   */
  public java.lang.Long getShippedItemSalesMicros() {
    return shippedItemSalesMicros;
  }

  /**
   * Total price of shipped items in micros (deprecated; returns 0 starting from May 2024). See
   * {@link #shippedItemSalesMicros}.
   * @param shippedItemSalesMicros shippedItemSalesMicros or {@code null} for none
   */
  public Metrics setShippedItemSalesMicros(java.lang.Long shippedItemSalesMicros) {
    this.shippedItemSalesMicros = shippedItemSalesMicros;
    return this;
  }

  /**
   * Number of shipped items (deprecated; returns 0 starting from May 2024). See
   * {@link #shippedItems}.
   * @return value or {@code null} for none
   */
  public java.lang.Long getShippedItems() {
    return shippedItems;
  }

  /**
   * Number of shipped items (deprecated; returns 0 starting from May 2024). See
   * {@link #shippedItems}.
   * @param shippedItems shippedItems or {@code null} for none
   */
  public Metrics setShippedItems(java.lang.Long shippedItems) {
    this.shippedItems = shippedItems;
    return this;
  }

  /**
   * Number of fully shipped orders (deprecated; returns 0 starting from May 2024). See
   * {@link #shippedOrders}.
   * @return value or {@code null} for none
   */
  public java.lang.Long getShippedOrders() {
    return shippedOrders;
  }

  /**
   * Number of fully shipped orders (deprecated; returns 0 starting from May 2024). See
   * {@link #shippedOrders}.
   * @param shippedOrders shippedOrders or {@code null} for none
   */
  public Metrics setShippedOrders(java.lang.Long shippedOrders) {
    this.shippedOrders = shippedOrders;
    return this;
  }

  /**
   * Number of unshipped items at end of queried day (deprecated; returns 0 starting from May
   * 2024). See {@link #unshippedItems}.
   * @return value or {@code null} for none
   */
  public java.lang.Double getUnshippedItems() {
    return unshippedItems;
  }

  /**
   * Number of unshipped items at end of queried day (deprecated; returns 0 starting from May
   * 2024). See {@link #unshippedItems}.
   * @param unshippedItems unshippedItems or {@code null} for none
   */
  public Metrics setUnshippedItems(java.lang.Double unshippedItems) {
    this.unshippedItems = unshippedItems;
    return this;
  }

  /**
   * Number of unshipped or partially shipped orders at end of queried day (deprecated; returns 0
   * starting from May 2024). See {@link #unshippedOrders}.
   * @return value or {@code null} for none
   */
  public java.lang.Double getUnshippedOrders() {
    return unshippedOrders;
  }

  /**
   * Number of unshipped or partially shipped orders at end of queried day (deprecated; returns 0
   * starting from May 2024). See {@link #unshippedOrders}.
   * @param unshippedOrders unshippedOrders or {@code null} for none
   */
  public Metrics setUnshippedOrders(java.lang.Double unshippedOrders) {
    this.unshippedOrders = unshippedOrders;
    return this;
  }

  @Override
  public Metrics set(String fieldName, Object value) {
    return (Metrics) super.set(fieldName, value);
  }

  @Override
  public Metrics clone() {
    return (Metrics) super.clone();
  }

}
apache/manifoldcf
36,308
connectors/jdbc/connector/src/main/java/org/apache/manifoldcf/authorities/authorities/jdbc/JDBCAuthority.java
/*
 * Copyright 2012 The Apache Software Foundation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.manifoldcf.authorities.authorities.jdbc;

import java.io.IOException;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.manifoldcf.agents.interfaces.ServiceInterruption;
import org.apache.manifoldcf.authorities.authorities.BaseAuthorityConnector;
import org.apache.manifoldcf.authorities.interfaces.AuthorizationResponse;
import org.apache.manifoldcf.core.cachemanager.BaseDescription;
import org.apache.manifoldcf.core.interfaces.BinaryInput;
import org.apache.manifoldcf.core.interfaces.CacheManagerFactory;
import org.apache.manifoldcf.core.interfaces.ConfigParams;
import org.apache.manifoldcf.core.interfaces.ICacheCreateHandle;
import org.apache.manifoldcf.core.interfaces.ICacheDescription;
import org.apache.manifoldcf.core.interfaces.ICacheHandle;
import org.apache.manifoldcf.core.interfaces.ICacheManager;
import org.apache.manifoldcf.core.interfaces.IHTTPOutput;
import org.apache.manifoldcf.connectorcommon.interfaces.IKeystoreManager;
import org.apache.manifoldcf.core.interfaces.IPostParameters;
import org.apache.manifoldcf.core.interfaces.IThreadContext;
import org.apache.manifoldcf.connectorcommon.interfaces.KeystoreManagerFactory;
import org.apache.manifoldcf.core.interfaces.ManifoldCFException;
import org.apache.manifoldcf.core.interfaces.StringSet;
import org.apache.manifoldcf.core.interfaces.TimeMarker;
import org.apache.manifoldcf.core.interfaces.IResultRow;
import org.apache.manifoldcf.jdbc.JDBCConnection;
import org.apache.manifoldcf.jdbc.JDBCConstants;
import org.apache.manifoldcf.jdbc.IDynamicResultSet;
import org.apache.manifoldcf.jdbc.IDynamicResultRow;
import org.apache.manifoldcf.authorities.system.Logging;

/**
 * Authority connector backed by a JDBC database: configuration supplies the JDBC provider,
 * connection details, and two SQL queries (a user-id query and an access-token query) that are
 * used to resolve a user's authorization tokens.
 *
 * @author krycek
 */
public class JDBCAuthority extends BaseAuthorityConnector {

  public static final String _rcsid = "@(#)$Id: JDBCAuthority.java $";

  // Live JDBC connection; null until established (presumably by getSession(), which is
  // defined later in this class — outside this view).
  protected JDBCConnection connection = null;
  // Connection configuration, populated from ConfigParams in connect().
  protected String jdbcProvider = null;
  protected String accessMethod = null;
  protected String host = null;
  protected String databaseName = null;
  protected String rawDriverString = null;
  protected String userName = null;
  protected String password = null;
  // SQL queries used to map a user name to an id and an id to access tokens.
  protected String idQuery = null;
  protected String tokenQuery = null;

  // Cache tuning: lifetime of a cached response and LRU capacity.
  // NOTE(review): presumably consumed by the response-cache machinery later in this class —
  // not visible here; confirm before relying on these values.
  private long responseLifetime = 60000L; //60sec
  private int LRUsize = 1000;

  /**
   * Cache manager.
   */
  private ICacheManager cacheManager = null;

  /**
   * Set thread context.
   * Also (re)creates the cache manager, which is bound to the thread context.
   */
  @Override
  public void setThreadContext(IThreadContext tc)
    throws ManifoldCFException {
    super.setThreadContext(tc);
    cacheManager = CacheManagerFactory.make(tc);
  }

  /**
   * Connect.  The configuration parameters are included.
   * Copies each connection setting out of the supplied parameters; the password is stored
   * obfuscated and is read back via getObfuscatedParameter.
   *
   * @param configParams are the configuration parameters for this connection.
   */
  @Override
  public void connect(ConfigParams configParams) {
    super.connect(configParams);

    jdbcProvider = configParams.getParameter(JDBCConstants.providerParameter);
    accessMethod = configParams.getParameter(JDBCConstants.methodParameter);
    host = configParams.getParameter(JDBCConstants.hostParameter);
    databaseName = configParams.getParameter(JDBCConstants.databaseNameParameter);
    rawDriverString = configParams.getParameter(JDBCConstants.driverStringParameter);
    userName = configParams.getParameter(JDBCConstants.databaseUserName);
    password = configParams.getObfuscatedParameter(JDBCConstants.databasePassword);
    idQuery = configParams.getParameter(JDBCConstants.databaseUserIdQuery);
    tokenQuery = configParams.getParameter(JDBCConstants.databaseTokensQuery);
  }

  /**
   * Check status of connection.
   * Establishes a session and runs a connection test; a ServiceInterruption is reported as a
   * transient error string rather than thrown, so the UI can display it.
   */
  @Override
  public String check()
    throws ManifoldCFException {
    try {
      getSession();
      // Attempt to fetch a connection; if this succeeds we pass
      connection.testConnection();
      return super.check();
    } catch (ServiceInterruption e) {
      if (Logging.authorityConnectors.isDebugEnabled()) {
        Logging.authorityConnectors.debug("Service interruption in check(): " + e.getMessage(), e);
      }
      return "Transient error: " + e.getMessage();
    }
  }

  /**
   * Close the connection.  Call this before discarding the repository connector.
*/ @Override public void disconnect() throws ManifoldCFException { connection = null; host = null; jdbcProvider = null; accessMethod = null; databaseName = null; rawDriverString = null; userName = null; password = null; super.disconnect(); } /** * Set up a session */ protected void getSession() throws ManifoldCFException, ServiceInterruption { if (connection == null) { if (jdbcProvider == null || jdbcProvider.length() == 0) { throw new ManifoldCFException("Missing parameter '" + JDBCConstants.providerParameter + "'"); } if ((host == null || host.length() == 0) && (rawDriverString == null || rawDriverString.length() == 0)) throw new ManifoldCFException("Missing parameter '"+JDBCConstants.hostParameter+"' or '"+JDBCConstants.driverStringParameter+"'"); connection = new JDBCConnection(jdbcProvider,(accessMethod==null || accessMethod.equals("name")),host,databaseName,rawDriverString,userName,password); } } private String createCacheConnectionString() { StringBuilder sb = new StringBuilder(); sb.append(jdbcProvider).append("|") .append((host==null)?"":host).append("|") .append((databaseName==null)?"":databaseName).append("|") .append((rawDriverString==null)?"":rawDriverString).append("|") .append(userName); return sb.toString(); } /** * Obtain the access tokens for a given user name. * * @param userName is the user name or identifier. * @return the response tokens (according to the current authority). (Should * throws an exception only when a condition cannot be properly described * within the authorization response object.) 
*/ @Override public AuthorizationResponse getAuthorizationResponse(String userName) throws ManifoldCFException { // Construct a cache description object ICacheDescription objectDescription = new JdbcAuthorizationResponseDescription(userName, createCacheConnectionString(), idQuery, tokenQuery, this.responseLifetime, this.LRUsize); // Enter the cache ICacheHandle ch = cacheManager.enterCache(new ICacheDescription[]{objectDescription}, null, null); try { ICacheCreateHandle createHandle = cacheManager.enterCreateSection(ch); try { // Lookup the object AuthorizationResponse response = (AuthorizationResponse) cacheManager.lookupObject(createHandle, objectDescription); if (response != null) { return response; } // Create the object. response = getAuthorizationResponseUncached(userName); // Save it in the cache cacheManager.saveObject(createHandle, objectDescription, response); // And return it... return response; } finally { cacheManager.leaveCreateSection(createHandle); } } finally { cacheManager.leaveCache(ch); } } public AuthorizationResponse getAuthorizationResponseUncached(String userName) throws ManifoldCFException { try { getSession(); VariableMap vm = new VariableMap(); addConstant(vm, JDBCConstants.idReturnVariable, JDBCConstants.idReturnColumnName); addVariable(vm, JDBCConstants.userNameVariable, userName); // Find user id ArrayList paramList = new ArrayList(); StringBuilder sb = new StringBuilder(); substituteQuery(idQuery, vm, sb, paramList); IDynamicResultSet idSet; try { idSet = connection.executeUncachedQuery(sb.toString(),paramList,-1); } catch (ServiceInterruption e) { return RESPONSE_UNREACHABLE; } catch (ManifoldCFException e) { throw e; } String uid; try { IDynamicResultRow row = idSet.getNextRow(); if (row == null) return RESPONSE_USERNOTFOUND; try { Object oUid = row.getValue(JDBCConstants.idReturnColumnName); if (oUid == null) throw new ManifoldCFException("Bad id query; doesn't return $(IDCOLUMN) column. 
Try using quotes around $(IDCOLUMN) variable, e.g. \"$(IDCOLUMN)\"."); uid = JDBCConnection.readAsString(oUid); } finally { row.close(); } } finally { idSet.close(); } if (uid.isEmpty()) { return RESPONSE_USERNOTFOUND; } // now check tokens vm = new VariableMap(); addConstant(vm, JDBCConstants.tokenReturnVariable, JDBCConstants.tokenReturnColumnName); addVariable(vm, JDBCConstants.userNameVariable, userName); addVariable(vm, JDBCConstants.userIDVariable, uid); sb = new StringBuilder(); paramList = new ArrayList(); substituteQuery(tokenQuery, vm, sb, paramList); try { idSet = connection.executeUncachedQuery(sb.toString(),paramList,-1); } catch (ServiceInterruption e) { return RESPONSE_UNREACHABLE; } catch (ManifoldCFException e) { throw e; } ArrayList<String> tokenArray = new ArrayList<String>(); try { while (true) { IDynamicResultRow row = idSet.getNextRow(); if (row == null) break; try { Object oToken = row.getValue(JDBCConstants.tokenReturnColumnName); if (oToken == null) throw new ManifoldCFException("Bad token query; doesn't return $(TOKENCOLUMN) column. Try using quotes around $(TOKENCOLUMN) variable, e.g. \"$(TOKENCOLUMN)\"."); String token = JDBCConnection.readAsString(oToken); if (!token.isEmpty()) { tokenArray.add(token); } } finally { row.close(); } } } finally { idSet.close(); } return new AuthorizationResponse(tokenArray.toArray(new String[0]), AuthorizationResponse.RESPONSE_OK); } catch (ServiceInterruption e) { Logging.authorityConnectors.warn("JDBCAuthority: Service interruption: "+e.getMessage(),e); return RESPONSE_UNREACHABLE; } } // UI support methods. // // These support methods come in two varieties. The first bunch is involved in setting up connection configuration information. The second bunch // is involved in presenting and editing document specification information for a job. 
The two kinds of methods are accordingly treated differently, // in that the first bunch cannot assume that the current connector object is connected, while the second bunch can. That is why the first bunch // receives a thread context argument for all UI methods, while the second bunch does not need one (since it has already been applied via the connect() // method, above). /** * Output the configuration header section. This method is called in the head * section of the connector's configuration page. Its purpose is to add the * required tabs to the list, and to output any javascript methods that might * be needed by the configuration editing HTML. * * @param threadContext is the local thread context. * @param out is the output to which any HTML should be sent. * @param parameters are the configuration parameters, as they currently * exist, for this connection being configured. * @param tabsArray is an array of tab names. Add to this array any tab names * that are specific to the connector. 
*/ @Override public void outputConfigurationHeader(IThreadContext threadContext, IHTTPOutput out, Locale locale, ConfigParams parameters, List<String> tabsArray) throws ManifoldCFException, IOException { tabsArray.add(Messages.getString(locale, "JDBCAuthority.DatabaseType")); tabsArray.add(Messages.getString(locale, "JDBCAuthority.Server")); tabsArray.add(Messages.getString(locale, "JDBCAuthority.Credentials")); tabsArray.add(Messages.getString(locale, "JDBCAuthority.Queries")); out.print( "<script type=\"text/javascript\">\n"+ "<!--\n"+ "function checkConfigForSave()\n"+ "{\n"+ " if (editconnection.databasehost.value == \"\" && editconnection.rawjdbcstring.value == \"\")\n"+ " {\n"+ " alert(\"" + Messages.getBodyJavascriptString(locale, "JDBCAuthority.PleaseFillInADatabaseServerName") + "\");\n"+ " SelectTab(\"" + Messages.getBodyJavascriptString(locale, "JDBCAuthority.Server") + "\");\n"+ " editconnection.databasehost.focus();\n"+ " return false;\n"+ " }\n"+ " if (editconnection.databasename.value == \"\" && editconnection.rawjdbcstring.value == \"\")\n"+ " {\n"+ " alert(\"" + Messages.getBodyJavascriptString(locale, "JDBCAuthority.PleaseFillInTheNameOfTheDatabase") + "\");\n"+ " SelectTab(\"" + Messages.getBodyJavascriptString(locale, "JDBCAuthority.Server") + "\");\n"+ " editconnection.databasename.focus();\n"+ " return false;\n"+ " }\n"+ " if (editconnection.username.value == \"\")\n"+ " {\n"+ " alert(\"" + Messages.getBodyJavascriptString(locale, "JDBCAuthority.PleaseSupplyTheDatabaseUsernameForThisConnection") + "\");\n"+ " SelectTab(\"" + Messages.getBodyJavascriptString(locale, "JDBCAuthority.Credentials") + "\");\n"+ " editconnection.username.focus();\n"+ " return false;\n"+ " }\n"+ " return true;\n"+ "}\n"+ "\n"+ "//-->\n"+ "</script>\n" ); } /** * Output the configuration body section. This method is called in the body * section of the connector's configuration page. Its purpose is to present * the required form elements for editing. 
The coder can presume that the HTML * that is output from this configuration will be within appropriate &lt;html&gt;, * &lt;body&gt;, and &lt;form&gt; tags. The name of the form is "editconnection". * * @param threadContext is the local thread context. * @param out is the output to which any HTML should be sent. * @param parameters are the configuration parameters, as they currently * exist, for this connection being configured. * @param tabName is the current tab name. */ @Override public void outputConfigurationBody(IThreadContext threadContext, IHTTPOutput out, Locale locale, ConfigParams parameters, String tabName) throws ManifoldCFException, IOException { String lJdbcProvider = parameters.getParameter(JDBCConstants.providerParameter); if (lJdbcProvider == null) { lJdbcProvider = "oracle:thin:@"; } String lAccessMethod = parameters.getParameter(JDBCConstants.methodParameter); if (lAccessMethod == null) lAccessMethod = "name"; String lHost = parameters.getParameter(JDBCConstants.hostParameter); if (lHost == null) { lHost = "localhost"; } String lDatabaseName = parameters.getParameter(JDBCConstants.databaseNameParameter); if (lDatabaseName == null) { lDatabaseName = "database"; } String rawJDBCString = parameters.getParameter(JDBCConstants.driverStringParameter); if (rawJDBCString == null) rawJDBCString = ""; String databaseUser = parameters.getParameter(JDBCConstants.databaseUserName); if (databaseUser == null) { databaseUser = ""; } String databasePassword = parameters.getObfuscatedParameter(JDBCConstants.databasePassword); if (databasePassword == null) { databasePassword = ""; } else { databasePassword = out.mapPasswordToKey(databasePassword); } String lIdQuery = parameters.getParameter(JDBCConstants.databaseUserIdQuery); if (lIdQuery == null) { lIdQuery = "SELECT idfield AS $(IDCOLUMN) FROM usertable WHERE login = $(USERNAME)"; } String lTokenQuery = parameters.getParameter(JDBCConstants.databaseTokensQuery); if (lTokenQuery == null) { lTokenQuery = "SELECT 
groupnamefield AS $(TOKENCOLUMN) FROM grouptable WHERE user_id = $(UID) OR login = $(USERNAME)"; } // "Database Type" tab if (tabName.equals(Messages.getString(locale, "JDBCAuthority.DatabaseType"))) { out.print( "<table class=\"displaytable\">\n"+ " <tr><td class=\"separator\" colspan=\"2\"><hr/></td></tr>\n"+ " <tr>\n"+ " <td class=\"description\"><nobr>" + Messages.getBodyString(locale, "JDBCAuthority.DatabaseType2") + "</nobr></td><td class=\"value\">\n"+ " <select multiple=\"false\" name=\"databasetype\" size=\"2\">\n"+ " <option value=\"oracle:thin:@\" " + (lJdbcProvider.equals("oracle:thin:@") ? "selected=\"selected\"" : "") + ">Oracle</option>\n"+ " <option value=\"postgresql://\" " + (lJdbcProvider.equals("postgresql:") ? "selected=\"selected\"" : "") + ">Postgres SQL</option>\n"+ " <option value=\"jtds:sqlserver://\" " + (lJdbcProvider.equals("jtds:sqlserver:") ? "selected=\"selected\"" : "") + ">MS SQL Server (&gt; V6.5)</option>\n"+ " <option value=\"jtds:sybase://\" " + (lJdbcProvider.equals("jtds:sybase:") ? "selected=\"selected\"" : "") + ">Sybase (&gt;= V10)</option>\n"+ " <option value=\"mysql://\" " + (lJdbcProvider.equals("mysql:") ? "selected=\"selected\"" : "") + ">MySQL (&gt;= V5)</option>\n"+ " <option value=\"mariadb://\" " + (lJdbcProvider.equals("mariadb:") ? 
"selected=\"selected\"" : "") + ">MariaDB</option>\n"+ " </select>\n"+ " </td>\n"+ " </tr>\n"+ " <tr><td class=\"separator\" colspan=\"2\"><hr/></td></tr>\n"+ " <tr>\n"+ " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"JDBCAuthority.AccessMethod") + "</nobr></td><td class=\"value\">\n"+ " <select multiple=\"false\" name=\"accessmethod\" size=\"2\">\n"+ " <option value=\"name\" "+(lAccessMethod.equals("name")?"selected=\"selected\"":"")+">"+Messages.getBodyString(locale,"JDBCAuthority.ByName")+"</option>\n"+ " <option value=\"label\" "+(lAccessMethod.equals("label")?"selected=\"selected\"":"")+">"+Messages.getBodyString(locale,"JDBCAuthority.ByLabel")+"</option>\n"+ " </select>\n"+ " </td>\n"+ " </tr>\n"+ "</table>\n"); } else { out.print( "<input type=\"hidden\" name=\"databasetype\" value=\"" + lJdbcProvider + "\"/>\n"+ "<input type=\"hidden\" name=\"accessmethod\" value=\""+lAccessMethod+"\"/>\n" ); } // "Server" tab if (tabName.equals(Messages.getString(locale, "JDBCAuthority.Server"))) { out.print( "<table class=\"displaytable\">\n"+ " <tr><td class=\"separator\" colspan=\"2\"><hr/></td></tr>\n"+ " <tr>\n"+ " <td class=\"description\"><nobr>" + Messages.getBodyString(locale, "JDBCAuthority.DatabaseHostAndPort") + "</nobr></td><td class=\"value\"><input type=\"text\" size=\"64\" name=\"databasehost\" value=\"" + org.apache.manifoldcf.ui.util.Encoder.attributeEscape(lHost) + "\"/></td>\n"+ " </tr>\n"+ " <tr>\n"+ " <td class=\"description\"><nobr>" + Messages.getBodyString(locale, "JDBCAuthority.DatabaseServiceNameOrInstanceDatabase") + "</nobr></td><td class=\"value\"><input type=\"text\" size=\"32\" name=\"databasename\" value=\"" + org.apache.manifoldcf.ui.util.Encoder.attributeEscape(lDatabaseName) + "\"/></td>\n"+ " </tr>\n"+ " <tr>\n"+ " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"JDBCAuthority.RawDatabaseConnectString") + "</nobr></td><td class=\"value\"><input type=\"text\" size=\"80\" name=\"rawjdbcstring\" 
value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(rawJDBCString)+"\"/></td>\n"+ " </tr>\n"+ "</table>\n" ); } else { out.print( "<input type=\"hidden\" name=\"databasehost\" value=\"" + org.apache.manifoldcf.ui.util.Encoder.attributeEscape(lHost) + "\"/>\n"+ "<input type=\"hidden\" name=\"databasename\" value=\"" + org.apache.manifoldcf.ui.util.Encoder.attributeEscape(lDatabaseName) + "\"/>\n"+ "<input type=\"hidden\" name=\"rawjdbcstring\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(rawJDBCString)+"\"/>\n" ); } // "Credentials" tab if (tabName.equals(Messages.getString(locale, "JDBCAuthority.Credentials"))) { out.print( "<table class=\"displaytable\">\n" + " <tr><td class=\"separator\" colspan=\"2\"><hr/></td></tr>\n" + " <tr>\n" + " <td class=\"description\"><nobr>" + Messages.getBodyString(locale, "JDBCAuthority.UserName") + "</nobr></td><td class=\"value\"><input type=\"text\" size=\"32\" name=\"username\" value=\"" + org.apache.manifoldcf.ui.util.Encoder.attributeEscape(databaseUser) + "\"/></td>\n" + " </tr>\n" + " <tr>\n" + " <td class=\"description\"><nobr>" + Messages.getBodyString(locale, "JDBCAuthority.Password") + "</nobr></td><td class=\"value\"><input type=\"password\" size=\"32\" name=\"password\" value=\"" + org.apache.manifoldcf.ui.util.Encoder.attributeEscape(databasePassword) + "\"/></td>\n" + " </tr>\n" + "</table>\n"); } else { out.print( "<input type=\"hidden\" name=\"username\" value=\"" + org.apache.manifoldcf.ui.util.Encoder.attributeEscape(databaseUser) + "\"/>\n" + "<input type=\"hidden\" name=\"password\" value=\"" + org.apache.manifoldcf.ui.util.Encoder.attributeEscape(databasePassword) + "\"/>\n"); } if (tabName.equals(Messages.getString(locale, "JDBCAuthority.Queries"))) { out.print( "<table class=\"displaytable\">\n" + " <tr><td class=\"separator\" colspan=\"2\"><hr/></td></tr>\n" + " <tr>\n" + " <td class=\"description\"><nobr>" + Messages.getBodyString(locale, "JDBCAuthority.UserIdQuery") + 
"</nobr><br/><nobr>" + Messages.getBodyString(locale, "JDBCAuthority.returnUserIdOrEmptyResultset") + "</nobr></td>\n" + " <td class=\"value\"><textarea name=\"idquery\" cols=\"64\" rows=\"6\">" + org.apache.manifoldcf.ui.util.Encoder.bodyEscape(lIdQuery) + "</textarea></td>\n" + " </tr>\n" + " <tr>\n" + " <td class=\"description\"><nobr>" + Messages.getBodyString(locale, "JDBCAuthority.TokenQuery") + "</nobr><br/><nobr>" + Messages.getBodyString(locale, "JDBCAuthority.returnTokensForUser") + "</nobr></td>\n" + " <td class=\"value\"><textarea name=\"tokenquery\" cols=\"64\" rows=\"6\">" + org.apache.manifoldcf.ui.util.Encoder.bodyEscape(lTokenQuery) + "</textarea></td>\n" + " </tr>\n" + "</table>\n"); } else { out.print( "<input type=\"hidden\" name=\"idquery\" value=\"" + org.apache.manifoldcf.ui.util.Encoder.attributeEscape(lIdQuery) + "\"/>\n" + "<input type=\"hidden\" name=\"tokenquery\" value=\"" + org.apache.manifoldcf.ui.util.Encoder.attributeEscape(lTokenQuery) + "\"/>\n"); } } /** * Process a configuration post. This method is called at the start of the * connector's configuration page, whenever there is a possibility that form * data for a connection has been posted. Its purpose is to gather form * information and modify the configuration parameters accordingly. The name * of the posted form is "editconnection". * * @param threadContext is the local thread context. * @param variableContext is the set of variables available from the post, * including binary file post information. * @param parameters are the configuration parameters, as they currently * exist, for this connection being configured. * @return null if all is well, or a string error message if there is an error * that should prevent saving of the connection (and cause a redirection to an * error page). 
*/ @Override public String processConfigurationPost(IThreadContext threadContext, IPostParameters variableContext, Locale locale, ConfigParams parameters) throws ManifoldCFException { String type = variableContext.getParameter("databasetype"); if (type != null) { parameters.setParameter(JDBCConstants.providerParameter, type); } String accessMethod = variableContext.getParameter("accessmethod"); if (accessMethod != null) parameters.setParameter(JDBCConstants.methodParameter,accessMethod); String lHost = variableContext.getParameter("databasehost"); if (lHost != null) { parameters.setParameter(JDBCConstants.hostParameter, lHost); } String lDatabaseName = variableContext.getParameter("databasename"); if (lDatabaseName != null) { parameters.setParameter(JDBCConstants.databaseNameParameter, lDatabaseName); } String rawJDBCString = variableContext.getParameter("rawjdbcstring"); if (rawJDBCString != null) parameters.setParameter(JDBCConstants.driverStringParameter,rawJDBCString); String lUserName = variableContext.getParameter("username"); if (lUserName != null) { parameters.setParameter(JDBCConstants.databaseUserName, lUserName); } String lPassword = variableContext.getParameter("password"); if (lPassword != null) { parameters.setObfuscatedParameter(JDBCConstants.databasePassword, variableContext.mapKeyToPassword(lPassword)); } String lIdQuery = variableContext.getParameter("idquery"); if (lIdQuery != null) { parameters.setParameter(JDBCConstants.databaseUserIdQuery, lIdQuery); } String lTokenQuery = variableContext.getParameter("tokenquery"); if (lTokenQuery != null) { parameters.setParameter(JDBCConstants.databaseTokensQuery, lTokenQuery); } return null; } /** * View configuration. This method is called in the body section of the * connector's view configuration page. Its purpose is to present the * connection information to the user. 
The coder can presume that the HTML * that is output from this configuration will be within appropriate &lt;html&gt; * and &lt;body&gt; tags. * * @param threadContext is the local thread context. * @param out is the output to which any HTML should be sent. * @param parameters are the configuration parameters, as they currently * exist, for this connection being configured. */ @Override public void viewConfiguration(IThreadContext threadContext, IHTTPOutput out, Locale locale, ConfigParams parameters) throws ManifoldCFException, IOException { out.print( "<table class=\"displaytable\">\n" + " <tr>\n" + " <td class=\"description\" colspan=\"1\"><nobr>" + Messages.getBodyString(locale, "JDBCAuthority.Parameters") + "</nobr></td>\n" + " <td class=\"value\" colspan=\"3\">\n"); Iterator iter = parameters.listParameters(); while (iter.hasNext()) { String param = (String) iter.next(); String value = parameters.getParameter(param); if (param.length() >= "password".length() && param.substring(param.length() - "password".length()).equalsIgnoreCase("password")) { out.print( " <nobr>" + org.apache.manifoldcf.ui.util.Encoder.bodyEscape(param) + "=********</nobr><br/>\n"); } else if (param.length() >= "keystore".length() && param.substring(param.length() - "keystore".length()).equalsIgnoreCase("keystore")) { IKeystoreManager kmanager = KeystoreManagerFactory.make("", value); out.print( " <nobr>" + org.apache.manifoldcf.ui.util.Encoder.bodyEscape(param) + "=&lt;" + Integer.toString(kmanager.getContents().length) + " certificate(s)&gt;</nobr><br/>\n"); } else { out.print( " <nobr>" + org.apache.manifoldcf.ui.util.Encoder.bodyEscape(param) + "=" + org.apache.manifoldcf.ui.util.Encoder.bodyEscape(value) + "</nobr><br/>\n"); } } out.print( " </td>\n" + " </tr>\n" + "</table>\n"); } /** * Given a query, and a parameter map, substitute it. Each variable * substitutes the string, and it also substitutes zero or more query * parameters. 
*/ protected static void substituteQuery(String inputString, VariableMap inputMap, StringBuilder outputQuery, ArrayList outputParams) throws ManifoldCFException { // We are looking for strings that look like this: $(something) // Right at the moment we don't care even about quotes, so we just want to look for $(. int startIndex = 0; while (true) { int nextIndex = inputString.indexOf("$(", startIndex); if (nextIndex == -1) { outputQuery.append(inputString.substring(startIndex)); break; } int endIndex = inputString.indexOf(")", nextIndex); if (endIndex == -1) { outputQuery.append(inputString.substring(startIndex)); break; } String variableName = inputString.substring(nextIndex + 2, endIndex); VariableMapItem item = inputMap.getVariable(variableName); if (item == null) { throw new ManifoldCFException("No such substitution variable: $(" + variableName + ")"); } outputQuery.append(inputString.substring(startIndex, nextIndex)); outputQuery.append(item.getValue()); ArrayList inputParams = item.getParameters(); if (inputParams != null) { int i = 0; while (i < inputParams.size()) { Object x = inputParams.get(i++); outputParams.add(x); } } startIndex = endIndex + 1; } } /** * Add string query variables */ protected static void addVariable(VariableMap map, String varName, String variable) { ArrayList params = new ArrayList(); params.add(variable); map.addVariable(varName, "?", params); } /** * Add string query constants */ protected static void addConstant(VariableMap map, String varName, String value) { map.addVariable(varName, value, null); } // pass params to preparedStatement protected static void loadPS(PreparedStatement ps, ArrayList data) throws java.sql.SQLException, ManifoldCFException { if (data != null) { for (int i = 0; i < data.size(); i++) { // If the input type is a string, then set it as such. // Otherwise, if it's an input stream, we make a blob out of it. 
Object x = data.get(i); if (x instanceof String) { String value = (String) x; // letting database do lame conversion! ps.setString(i + 1, value); } if (x instanceof BinaryInput) { BinaryInput value = (BinaryInput) x; // System.out.println("Blob length on write = "+Long.toString(value.getLength())); // The oracle driver does a binary conversion to base 64 when writing data // into a clob column using a binary stream operator. Since at this // point there is no way to distinguish the two, and since our tests use CLOB, // this code doesn't work for them. // So, for now, use the ascii stream method. //ps.setBinaryStream(i+1,value.getStream(),(int)value.getLength()); ps.setAsciiStream(i + 1, value.getStream(), (int) value.getLength()); } if (x instanceof java.util.Date) { ps.setDate(i + 1, new java.sql.Date(((java.util.Date) x).getTime())); } if (x instanceof Long) { ps.setLong(i + 1, ((Long) x).longValue()); } if (x instanceof TimeMarker) { ps.setTimestamp(i + 1, new java.sql.Timestamp(((Long) x).longValue())); } if (x instanceof Double) { ps.setDouble(i + 1, ((Double) x).doubleValue()); } if (x instanceof Integer) { ps.setInt(i + 1, ((Integer) x).intValue()); } if (x instanceof Float) { ps.setFloat(i + 1, ((Float) x).floatValue()); } } } } /** * Variable map entry. */ protected static class VariableMapItem { protected String value; protected ArrayList params; /** * Constructor. */ public VariableMapItem(String value, ArrayList params) { this.value = value; this.params = params; } /** * Get value. */ public String getValue() { return value; } /** * Get parameters. */ public ArrayList getParameters() { return params; } } /** * Variable map. 
*/ protected static class VariableMap { protected Map variableMap = new HashMap(); /** * Constructor */ public VariableMap() { } /** * Add a variable map entry */ public void addVariable(String variableName, String value, ArrayList parameters) { VariableMapItem e = new VariableMapItem(value, parameters); variableMap.put(variableName, e); } /** * Get a variable map entry */ public VariableMapItem getVariable(String variableName) { return (VariableMapItem) variableMap.get(variableName); } } protected static StringSet emptyStringSet = new StringSet(); /** * This is the cache object descriptor for cached access tokens from this * connector. */ protected class JdbcAuthorizationResponseDescription extends BaseDescription { /** * The user name */ protected final String userName; /** * LDAP connection string with server name and base DN */ protected final String connectionString; /** The user query. */ protected final String userQuery; /** The token query. */ protected final String tokenQuery; /** * The response lifetime */ protected final long responseLifetime; /** * The expiration time */ protected long expirationTime = -1; /** * Constructor. */ public JdbcAuthorizationResponseDescription(String userName, String connectionString, String userQuery, String tokenQuery, long responseLifetime, int LRUsize) { super("JDBCAuthority", LRUsize); this.userName = userName; this.connectionString = connectionString; this.userQuery = userQuery; this.tokenQuery = tokenQuery; this.responseLifetime = responseLifetime; } /** * Return the invalidation keys for this object. 
*/ public StringSet getObjectKeys() { return emptyStringSet; } /** * Get the critical section name, used for synchronizing the creation of the * object */ public String getCriticalSectionName() { StringBuilder sb = new StringBuilder(getClass().getName()); sb.append("-").append(userName).append("-").append(connectionString).append("-") .append(userQuery).append("-").append(tokenQuery); return sb.toString(); } /** * Return the object expiration interval */ @Override public long getObjectExpirationTime(long currentTime) { if (expirationTime == -1) { expirationTime = currentTime + responseLifetime; } return expirationTime; } @Override public int hashCode() { return userName.hashCode() + connectionString.hashCode() + userQuery.hashCode() + tokenQuery.hashCode(); } @Override public boolean equals(Object o) { if (!(o instanceof JdbcAuthorizationResponseDescription)) { return false; } JdbcAuthorizationResponseDescription ard = (JdbcAuthorizationResponseDescription) o; if (!ard.userName.equals(userName)) { return false; } if (!ard.connectionString.equals(connectionString)) { return false; } if (!ard.userQuery.equals(userQuery)) { return false; } if (!ard.tokenQuery.equals(tokenQuery)) { return false; } return true; } } }
apache/ozone
37,387
hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/TestContainerStateMachineFailures.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.ozone.client.rpc; import static java.nio.charset.StandardCharsets.UTF_8; import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_COMMAND_STATUS_REPORT_INTERVAL; import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_CONTAINER_REPORT_INTERVAL; import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_HEARTBEAT_INTERVAL; import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_PIPELINE_REPORT_INTERVAL; import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerDataProto.State.QUASI_CLOSED; import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerDataProto.State.UNHEALTHY; import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_STALENODE_INTERVAL; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertInstanceOf; import static org.junit.jupiter.api.Assertions.assertNotEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNull; import static 
org.junit.jupiter.api.Assertions.assertSame; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; import java.io.File; import java.io.IOException; import java.io.UncheckedIOException; import java.nio.charset.StandardCharsets; import java.nio.file.Path; import java.time.Duration; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Set; import java.util.UUID; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.hdds.HddsUtils; import org.apache.hadoop.hdds.client.ReplicationConfig; import org.apache.hadoop.hdds.client.ReplicationFactor; import org.apache.hadoop.hdds.client.ReplicationType; import org.apache.hadoop.hdds.conf.DatanodeRatisServerConfig; import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.hdds.protocol.DatanodeDetails; import org.apache.hadoop.hdds.protocol.DatanodeID; import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos; import org.apache.hadoop.hdds.protocol.proto.HddsProtos; import org.apache.hadoop.hdds.ratis.conf.RatisClientConfig; import org.apache.hadoop.hdds.scm.OzoneClientConfig; import org.apache.hadoop.hdds.scm.ScmConfigKeys; import org.apache.hadoop.hdds.scm.XceiverClientManager; import org.apache.hadoop.hdds.scm.XceiverClientSpi; import org.apache.hadoop.hdds.scm.client.HddsClientUtils; import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerNotOpenException; import org.apache.hadoop.hdds.scm.pipeline.Pipeline; import org.apache.hadoop.hdds.utils.HddsServerUtil; import org.apache.hadoop.ozone.HddsDatanodeService; import org.apache.hadoop.ozone.MiniOzoneCluster; import org.apache.hadoop.ozone.OzoneConfigKeys; 
import org.apache.hadoop.ozone.OzoneConsts; import org.apache.hadoop.ozone.client.ObjectStore; import org.apache.hadoop.ozone.client.OzoneClient; import org.apache.hadoop.ozone.client.OzoneClientFactory; import org.apache.hadoop.ozone.client.io.KeyOutputStream; import org.apache.hadoop.ozone.client.io.OzoneInputStream; import org.apache.hadoop.ozone.client.io.OzoneOutputStream; import org.apache.hadoop.ozone.container.ContainerTestHelper; import org.apache.hadoop.ozone.container.TestHelper; import org.apache.hadoop.ozone.container.common.impl.ContainerData; import org.apache.hadoop.ozone.container.common.impl.ContainerDataYaml; import org.apache.hadoop.ozone.container.common.impl.HddsDispatcher; import org.apache.hadoop.ozone.container.common.interfaces.Container; import org.apache.hadoop.ozone.container.common.transport.server.ratis.ContainerStateMachine; import org.apache.hadoop.ozone.container.common.transport.server.ratis.XceiverServerRatis; import org.apache.hadoop.ozone.container.keyvalue.KeyValueContainerData; import org.apache.hadoop.ozone.container.ozoneimpl.OzoneContainer; import org.apache.hadoop.ozone.om.helpers.OmKeyArgs; import org.apache.hadoop.ozone.om.helpers.OmKeyInfo; import org.apache.hadoop.ozone.om.helpers.OmKeyLocationInfo; import org.apache.hadoop.ozone.protocol.commands.CloseContainerCommand; import org.apache.hadoop.ozone.protocol.commands.SCMCommand; import org.apache.ozone.test.GenericTestUtils; import org.apache.ozone.test.LambdaTestUtils; import org.apache.ozone.test.tag.Flaky; import org.apache.ratis.protocol.RaftGroupId; import org.apache.ratis.protocol.exceptions.StateMachineException; import org.apache.ratis.server.storage.FileInfo; import org.apache.ratis.statemachine.impl.SimpleStateMachineStorage; import org.apache.ratis.statemachine.impl.StatemachineImplTestUtil; import org.apache.ratis.thirdparty.com.google.protobuf.ByteString; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import 
org.junit.jupiter.api.Test; /** * Tests the containerStateMachine failure handling. */ public class TestContainerStateMachineFailures { private static MiniOzoneCluster cluster; private static OzoneClient client; private static ObjectStore objectStore; private static String volumeName; private static String bucketName; private static XceiverClientManager xceiverClientManager; /** * Create a MiniDFSCluster for testing. * * @throws IOException */ @BeforeAll public static void init() throws Exception { OzoneConfiguration conf = new OzoneConfiguration(); OzoneClientConfig clientConfig = conf.getObject(OzoneClientConfig.class); clientConfig.setStreamBufferFlushDelay(false); conf.setFromObject(clientConfig); conf.setTimeDuration(HDDS_CONTAINER_REPORT_INTERVAL, 200, TimeUnit.MILLISECONDS); conf.setTimeDuration(HDDS_COMMAND_STATUS_REPORT_INTERVAL, 200, TimeUnit.MILLISECONDS); conf.setTimeDuration(HDDS_PIPELINE_REPORT_INTERVAL, 200, TimeUnit.MILLISECONDS); conf.setTimeDuration(HDDS_HEARTBEAT_INTERVAL, 200, TimeUnit.MILLISECONDS); conf.setTimeDuration(OZONE_SCM_STALENODE_INTERVAL, 30, TimeUnit.SECONDS); conf.set(OzoneConfigKeys.OZONE_SCM_CLOSE_CONTAINER_WAIT_DURATION, "2s"); conf.set(ScmConfigKeys.OZONE_SCM_PIPELINE_SCRUB_INTERVAL, "2s"); conf.set(ScmConfigKeys.OZONE_SCM_PIPELINE_DESTROY_TIMEOUT, "5s"); RatisClientConfig ratisClientConfig = conf.getObject(RatisClientConfig.class); ratisClientConfig.setWriteRequestTimeout(Duration.ofSeconds(20)); ratisClientConfig.setWatchRequestTimeout(Duration.ofSeconds(20)); conf.setFromObject(ratisClientConfig); DatanodeRatisServerConfig ratisServerConfig = conf.getObject(DatanodeRatisServerConfig.class); ratisServerConfig.setRequestTimeOut(Duration.ofSeconds(3)); ratisServerConfig.setWatchTimeOut(Duration.ofSeconds(10)); conf.setFromObject(ratisServerConfig); RatisClientConfig.RaftConfig raftClientConfig = conf.getObject(RatisClientConfig.RaftConfig.class); raftClientConfig.setRpcRequestTimeout(Duration.ofSeconds(3)); 
raftClientConfig.setRpcWatchRequestTimeout(Duration.ofSeconds(20)); conf.setFromObject(raftClientConfig); conf.setLong(OzoneConfigKeys.HDDS_RATIS_SNAPSHOT_THRESHOLD_KEY, 1); conf.setQuietMode(false); cluster = MiniOzoneCluster.newBuilder(conf).setNumDatanodes(10) .build(); cluster.waitForClusterToBeReady(); cluster.waitForPipelineTobeReady(HddsProtos.ReplicationFactor.ONE, 60000); //the easiest way to create an open container is creating a key client = OzoneClientFactory.getRpcClient(conf); objectStore = client.getObjectStore(); xceiverClientManager = new XceiverClientManager(conf); volumeName = "testcontainerstatemachinefailures"; bucketName = volumeName; objectStore.createVolume(volumeName); objectStore.getVolume(volumeName).createBucket(bucketName); } /** * Shutdown MiniDFSCluster. */ @AfterAll public static void shutdown() { IOUtils.closeQuietly(client); if (xceiverClientManager != null) { xceiverClientManager.close(); } if (cluster != null) { cluster.shutdown(); } } @Test public void testContainerStateMachineCloseOnMissingPipeline() throws Exception { // This integration test is a bit of a hack to see if the highly // improbable event where the Datanode does not have the pipeline // in its Ratis channel but still receives a close container command // for a container that is open or in closing state. // Bugs in code can lead to this sequence of events but for this test // to inject this state, it removes the pipeline by directly calling // the underlying method. OzoneOutputStream key = objectStore.getVolume(volumeName).getBucket(bucketName) .createKey("testQuasiClosed1", 1024, ReplicationConfig.fromTypeAndFactor(ReplicationType.RATIS, ReplicationFactor.THREE), new HashMap<>()); key.write("ratis".getBytes(UTF_8)); key.flush(); KeyOutputStream groupOutputStream = (KeyOutputStream) key. 
getOutputStream(); List<OmKeyLocationInfo> locationInfoList = groupOutputStream.getLocationInfoList(); assertEquals(1, locationInfoList.size()); OmKeyLocationInfo omKeyLocationInfo = locationInfoList.get(0); Set<HddsDatanodeService> datanodeSet = TestHelper.getDatanodeServices(cluster, omKeyLocationInfo.getPipeline()); long containerID = omKeyLocationInfo.getContainerID(); for (HddsDatanodeService dn : datanodeSet) { XceiverServerRatis wc = (XceiverServerRatis) dn.getDatanodeStateMachine().getContainer().getWriteChannel(); if (wc == null) { // Test applicable only for RATIS based channel. return; } wc.notifyGroupRemove(RaftGroupId .valueOf(omKeyLocationInfo.getPipeline().getId().getId())); SCMCommand<?> command = new CloseContainerCommand( containerID, omKeyLocationInfo.getPipeline().getId()); command.setTerm( cluster .getStorageContainerManager() .getScmContext() .getTermOfLeader()); cluster.getStorageContainerManager().getScmNodeManager() .addDatanodeCommand(dn.getDatanodeDetails().getID(), command); } for (HddsDatanodeService dn : datanodeSet) { LambdaTestUtils.await(20000, 1000, () -> (dn.getDatanodeStateMachine() .getContainer().getContainerSet() .getContainer(containerID) .getContainerState().equals(QUASI_CLOSED))); } key.close(); } @Test @Flaky("HDDS-12215") public void testContainerStateMachineRestartWithDNChangePipeline() throws Exception { try (OzoneOutputStream key = objectStore.getVolume(volumeName).getBucket(bucketName) .createKey("testDNRestart", 1024, ReplicationConfig.fromTypeAndFactor(ReplicationType.RATIS, ReplicationFactor.THREE), new HashMap<>())) { key.write("ratis".getBytes(UTF_8)); key.flush(); KeyOutputStream groupOutputStream = (KeyOutputStream) key. 
getOutputStream(); List<OmKeyLocationInfo> locationInfoList = groupOutputStream.getLocationInfoList(); assertEquals(1, locationInfoList.size()); OmKeyLocationInfo omKeyLocationInfo = locationInfoList.get(0); Pipeline pipeline = omKeyLocationInfo.getPipeline(); List<HddsDatanodeService> datanodes = new ArrayList<>(TestHelper.getDatanodeServices(cluster, pipeline)); DatanodeDetails dn = datanodes.get(0).getDatanodeDetails(); // Delete all data volumes. cluster.getHddsDatanode(dn).getDatanodeStateMachine().getContainer().getVolumeSet().getVolumesList() .stream().forEach(v -> { try { FileUtils.deleteDirectory(v.getStorageDir()); } catch (IOException e) { throw new RuntimeException(e); } }); // Delete datanode.id datanodeIdFile. File datanodeIdFile = new File(HddsServerUtil.getDatanodeIdFilePath(cluster.getHddsDatanode(dn).getConf())); boolean deleted = datanodeIdFile.delete(); assertTrue(deleted); cluster.restartHddsDatanode(dn, false); GenericTestUtils.waitFor(() -> { try { key.write("ratis".getBytes(UTF_8)); key.flush(); return groupOutputStream.getLocationInfoList().size() > 1; } catch (IOException e) { throw new UncheckedIOException(e); } }, 1000, 30000); } } @Test public void testContainerStateMachineFailures() throws Exception { OzoneOutputStream key = objectStore.getVolume(volumeName).getBucket(bucketName) .createKey("ratis", 1024, ReplicationConfig.fromTypeAndFactor( ReplicationType.RATIS, ReplicationFactor.ONE), new HashMap<>()); byte[] testData = "ratis".getBytes(UTF_8); // First write and flush creates a container in the datanode key.write(testData); key.flush(); key.write(testData); KeyOutputStream groupOutputStream = (KeyOutputStream) key.getOutputStream(); List<OmKeyLocationInfo> locationInfoList = groupOutputStream.getLocationInfoList(); assertEquals(1, locationInfoList.size()); OmKeyLocationInfo omKeyLocationInfo = locationInfoList.get(0); HddsDatanodeService dn = TestHelper.getDatanodeService(omKeyLocationInfo, cluster); // delete the container dir 
FileUtil.fullyDelete(new File(dn.getDatanodeStateMachine() .getContainer().getContainerSet() .getContainer(omKeyLocationInfo.getContainerID()). getContainerData().getContainerPath())); try { // there is only 1 datanode in the pipeline, the pipeline will be closed // and allocation to new pipeline will fail as there is no other dn in // the cluster key.close(); } catch (IOException ioe) { } long containerID = omKeyLocationInfo.getContainerID(); // Make sure the container is marked unhealthy assertSame(dn.getDatanodeStateMachine() .getContainer().getContainerSet() .getContainer(containerID) .getContainerState(), UNHEALTHY); OzoneContainer ozoneContainer; // restart the hdds datanode, container should not in the regular set OzoneConfiguration config = dn.getConf(); final String dir = config.get(OzoneConfigKeys. HDDS_CONTAINER_RATIS_DATANODE_STORAGE_DIR) + UUID.randomUUID(); config.set(OzoneConfigKeys.HDDS_CONTAINER_RATIS_DATANODE_STORAGE_DIR, dir); int index = cluster.getHddsDatanodeIndex(dn.getDatanodeDetails()); cluster.restartHddsDatanode(dn.getDatanodeDetails(), false); ozoneContainer = cluster.getHddsDatanodes().get(index) .getDatanodeStateMachine().getContainer(); assertNull(ozoneContainer.getContainerSet(). 
getContainer(containerID)); } @Test public void testUnhealthyContainer() throws Exception { OzoneOutputStream key = objectStore.getVolume(volumeName).getBucket(bucketName) .createKey("ratis", 1024, ReplicationConfig.fromTypeAndFactor( ReplicationType.RATIS, ReplicationFactor.ONE), new HashMap<>()); // First write and flush creates a container in the datanode key.write("ratis".getBytes(UTF_8)); key.flush(); key.write("ratis".getBytes(UTF_8)); KeyOutputStream groupOutputStream = (KeyOutputStream) key .getOutputStream(); List<OmKeyLocationInfo> locationInfoList = groupOutputStream.getLocationInfoList(); assertEquals(1, locationInfoList.size()); OmKeyLocationInfo omKeyLocationInfo = locationInfoList.get(0); HddsDatanodeService dn = TestHelper.getDatanodeService(omKeyLocationInfo, cluster); ContainerData containerData = dn.getDatanodeStateMachine() .getContainer().getContainerSet() .getContainer(omKeyLocationInfo.getContainerID()) .getContainerData(); KeyValueContainerData keyValueContainerData = assertInstanceOf(KeyValueContainerData.class, containerData); // delete the container db file FileUtil.fullyDelete(new File(keyValueContainerData.getChunksPath())); try { // there is only 1 datanode in the pipeline, the pipeline will be closed // and allocation to new pipeline will fail as there is no other dn in // the cluster key.close(); } catch (IOException ioe) { } long containerID = omKeyLocationInfo.getContainerID(); // Make sure the container is marked unhealthy assertSame(dn.getDatanodeStateMachine() .getContainer().getContainerSet().getContainer(containerID) .getContainerState(), UNHEALTHY); // Check metadata in the .container file File containerFile = new File(keyValueContainerData.getMetadataPath(), containerID + OzoneConsts.CONTAINER_EXTENSION); keyValueContainerData = (KeyValueContainerData) ContainerDataYaml .readContainerFile(containerFile); assertEquals(keyValueContainerData.getState(), UNHEALTHY); OzoneConfiguration config = dn.getConf(); final String dir = 
config.get(OzoneConfigKeys. HDDS_CONTAINER_RATIS_DATANODE_STORAGE_DIR) + UUID.randomUUID(); config.set(OzoneConfigKeys.HDDS_CONTAINER_RATIS_DATANODE_STORAGE_DIR, dir); int index = cluster.getHddsDatanodeIndex(dn.getDatanodeDetails()); // restart the hdds datanode and see if the container is listed in the // in the missing container set and not in the regular set cluster.restartHddsDatanode(dn.getDatanodeDetails(), true); // make sure the container state is still marked unhealthy after restart keyValueContainerData = (KeyValueContainerData) ContainerDataYaml .readContainerFile(containerFile); assertEquals(keyValueContainerData.getState(), UNHEALTHY); OzoneContainer ozoneContainer; HddsDatanodeService dnService = cluster.getHddsDatanodes().get(index); ozoneContainer = dnService .getDatanodeStateMachine().getContainer(); HddsDispatcher dispatcher = (HddsDispatcher) ozoneContainer .getDispatcher(); ContainerProtos.ContainerCommandRequestProto.Builder request = ContainerProtos.ContainerCommandRequestProto.newBuilder(); request.setCmdType(ContainerProtos.Type.CloseContainer); request.setContainerID(containerID); request.setCloseContainer( ContainerProtos.CloseContainerRequestProto.getDefaultInstance()); request.setDatanodeUuid(dnService.getDatanodeDetails().getUuidString()); assertEquals(ContainerProtos.Result.CONTAINER_UNHEALTHY, dispatcher.dispatch(request.build(), null) .getResult()); } @Test public void testApplyTransactionFailure() throws Exception { OzoneOutputStream key = objectStore.getVolume(volumeName).getBucket(bucketName) .createKey("ratis", 1024, ReplicationConfig.fromTypeAndFactor( ReplicationType.RATIS, ReplicationFactor.ONE), new HashMap<>()); // First write and flush creates a container in the datanode key.write("ratis".getBytes(UTF_8)); key.flush(); key.write("ratis".getBytes(UTF_8)); KeyOutputStream groupOutputStream = (KeyOutputStream) key. 
getOutputStream(); List<OmKeyLocationInfo> locationInfoList = groupOutputStream.getLocationInfoList(); assertEquals(1, locationInfoList.size()); OmKeyLocationInfo omKeyLocationInfo = locationInfoList.get(0); HddsDatanodeService dn = TestHelper.getDatanodeService(omKeyLocationInfo, cluster); int index = cluster.getHddsDatanodeIndex(dn.getDatanodeDetails()); ContainerData containerData = dn.getDatanodeStateMachine() .getContainer().getContainerSet() .getContainer(omKeyLocationInfo.getContainerID()) .getContainerData(); KeyValueContainerData keyValueContainerData = assertInstanceOf(KeyValueContainerData.class, containerData); key.close(); ContainerStateMachine stateMachine = (ContainerStateMachine) TestHelper.getStateMachine(cluster. getHddsDatanodes().get(index), omKeyLocationInfo.getPipeline()); SimpleStateMachineStorage storage = (SimpleStateMachineStorage) stateMachine.getStateMachineStorage(); stateMachine.takeSnapshot(); final FileInfo snapshot = getSnapshotFileInfo(storage); final Path parentPath = snapshot.getPath(); // Since the snapshot threshold is set to 1, since there are // applyTransactions, we should see snapshots assertThat(parentPath.getParent().toFile().listFiles().length).isGreaterThan(0); assertNotNull(snapshot); long containerID = omKeyLocationInfo.getContainerID(); // delete the container db file FileUtil.fullyDelete(new File(keyValueContainerData.getContainerPath())); Pipeline pipeline = cluster.getStorageContainerLocationClient() .getContainerWithPipeline(containerID).getPipeline(); XceiverClientSpi xceiverClient = xceiverClientManager.acquireClient(pipeline); ContainerProtos.ContainerCommandRequestProto.Builder request = ContainerProtos.ContainerCommandRequestProto.newBuilder(); request.setDatanodeUuid(pipeline.getFirstNode().getUuidString()); request.setCmdType(ContainerProtos.Type.CloseContainer); request.setContainerID(containerID); request.setCloseContainer( ContainerProtos.CloseContainerRequestProto.getDefaultInstance()); // close 
container transaction will fail over Ratis and will initiate // a pipeline close action try { assertThrows(IOException.class, () -> xceiverClient.sendCommand(request.build())); } finally { xceiverClientManager.releaseClient(xceiverClient, false); } // Make sure the container is marked unhealthy assertSame(dn.getDatanodeStateMachine() .getContainer().getContainerSet().getContainer(containerID) .getContainerState(), UNHEALTHY); try { // try to take a new snapshot, ideally it should just fail stateMachine.takeSnapshot(); } catch (IOException ioe) { assertInstanceOf(StateMachineException.class, ioe); } if (snapshot.getPath().toFile().exists()) { // Make sure the latest snapshot is same as the previous one try { final FileInfo latestSnapshot = getSnapshotFileInfo(storage); assertEquals(snapshot.getPath(), latestSnapshot.getPath()); } catch (Throwable e) { assertFalse(snapshot.getPath().toFile().exists()); } } // when remove pipeline, group dir including snapshot will be deleted LambdaTestUtils.await(10000, 500, () -> (!snapshot.getPath().toFile().exists())); } @Test @Flaky("HDDS-6115") void testApplyTransactionIdempotencyWithClosedContainer() throws Exception { OzoneOutputStream key = objectStore.getVolume(volumeName).getBucket(bucketName) .createKey("ratis", 1024, ReplicationConfig.fromTypeAndFactor( ReplicationType.RATIS, ReplicationFactor.ONE), new HashMap<>()); // First write and flush creates a container in the datanode key.write("ratis".getBytes(UTF_8)); key.flush(); key.write("ratis".getBytes(UTF_8)); KeyOutputStream groupOutputStream = (KeyOutputStream) key.getOutputStream(); List<OmKeyLocationInfo> locationInfoList = groupOutputStream.getLocationInfoList(); assertEquals(1, locationInfoList.size()); OmKeyLocationInfo omKeyLocationInfo = locationInfoList.get(0); HddsDatanodeService dn = TestHelper.getDatanodeService(omKeyLocationInfo, cluster); ContainerData containerData = dn.getDatanodeStateMachine() .getContainer().getContainerSet() 
.getContainer(omKeyLocationInfo.getContainerID()) .getContainerData(); assertInstanceOf(KeyValueContainerData.class, containerData); key.close(); ContainerStateMachine stateMachine = (ContainerStateMachine) TestHelper.getStateMachine(dn, omKeyLocationInfo.getPipeline()); SimpleStateMachineStorage storage = (SimpleStateMachineStorage) stateMachine.getStateMachineStorage(); final FileInfo snapshot = getSnapshotFileInfo(storage); final Path parentPath = snapshot.getPath(); stateMachine.takeSnapshot(); assertThat(parentPath.getParent().toFile().listFiles().length).isGreaterThan(0); assertNotNull(snapshot); long markIndex1 = StatemachineImplTestUtil.findLatestSnapshot(storage) .getIndex(); long containerID = omKeyLocationInfo.getContainerID(); Pipeline pipeline = cluster.getStorageContainerLocationClient() .getContainerWithPipeline(containerID).getPipeline(); XceiverClientSpi xceiverClient = xceiverClientManager.acquireClient(pipeline); ContainerProtos.ContainerCommandRequestProto.Builder request = ContainerProtos.ContainerCommandRequestProto.newBuilder(); request.setDatanodeUuid(pipeline.getFirstNode().getUuidString()); request.setCmdType(ContainerProtos.Type.CloseContainer); request.setContainerID(containerID); request.setCloseContainer( ContainerProtos.CloseContainerRequestProto.getDefaultInstance()); xceiverClient.sendCommand(request.build()); assertSame( TestHelper.getDatanodeService(omKeyLocationInfo, cluster) .getDatanodeStateMachine() .getContainer().getContainerSet().getContainer(containerID) .getContainerState(), ContainerProtos.ContainerDataProto.State.CLOSED); assertTrue(stateMachine.isStateMachineHealthy()); try { stateMachine.takeSnapshot(); } finally { xceiverClientManager.releaseClient(xceiverClient, false); } // This is just an attempt to wait for an asynchronous call from Ratis API // to updateIncreasingly to finish as part of flaky test issue "HDDS-6115" // This doesn't solve the problem completely but reduce the failure ratio. 
GenericTestUtils.waitFor((() -> { try { return markIndex1 != StatemachineImplTestUtil .findLatestSnapshot(storage).getIndex(); } catch (IOException e) { // No action needed. The test case is going to fail at assertion. return true; } }), 1000, 30000); final FileInfo latestSnapshot = getSnapshotFileInfo(storage); assertNotEquals(snapshot.getPath(), latestSnapshot.getPath()); } // The test injects multiple write chunk requests along with closed container // request thereby inducing a situation where a writeStateMachine call // gets executed when the closed container apply completes thereby // failing writeStateMachine call. In any case, our stateMachine should // not be marked unhealthy and pipeline should not fail if container gets // closed here. @Test @Flaky("HDDS-13482") void testWriteStateMachineDataIdempotencyWithClosedContainer() throws Exception { OzoneOutputStream key = objectStore.getVolume(volumeName).getBucket(bucketName) .createKey("ratis-1", 1024, ReplicationConfig.fromTypeAndFactor( ReplicationType.RATIS, ReplicationFactor.ONE), new HashMap<>()); // First write and flush creates a container in the datanode key.write("ratis".getBytes(UTF_8)); key.flush(); key.write("ratis".getBytes(UTF_8)); KeyOutputStream groupOutputStream = (KeyOutputStream) key .getOutputStream(); List<OmKeyLocationInfo> locationInfoList = groupOutputStream.getLocationInfoList(); assertEquals(1, locationInfoList.size()); OmKeyLocationInfo omKeyLocationInfo = locationInfoList.get(0); HddsDatanodeService dn = TestHelper.getDatanodeService(omKeyLocationInfo, cluster); ContainerData containerData = dn.getDatanodeStateMachine() .getContainer().getContainerSet() .getContainer(omKeyLocationInfo.getContainerID()) .getContainerData(); assertInstanceOf(KeyValueContainerData.class, containerData); key.close(); ContainerStateMachine stateMachine = (ContainerStateMachine) TestHelper.getStateMachine(dn, omKeyLocationInfo.getPipeline()); SimpleStateMachineStorage storage = 
(SimpleStateMachineStorage) stateMachine.getStateMachineStorage(); final FileInfo snapshot = getSnapshotFileInfo(storage); final Path parentPath = snapshot.getPath(); stateMachine.takeSnapshot(); // Since the snapshot threshold is set to 1, since there are // applyTransactions, we should see snapshots assertThat(parentPath.getParent().toFile().listFiles().length).isGreaterThan(0); assertNotNull(snapshot); long containerID = omKeyLocationInfo.getContainerID(); Pipeline pipeline = cluster.getStorageContainerLocationClient() .getContainerWithPipeline(containerID).getPipeline(); XceiverClientSpi xceiverClient = xceiverClientManager.acquireClient(pipeline); CountDownLatch latch = new CountDownLatch(100); int count = 0; AtomicInteger failCount = new AtomicInteger(0); Runnable r1 = () -> { try { ContainerProtos.ContainerCommandRequestProto.Builder request = ContainerProtos.ContainerCommandRequestProto.newBuilder(); request.setDatanodeUuid(pipeline.getFirstNode().getUuidString()); request.setCmdType(ContainerProtos.Type.CloseContainer); request.setContainerID(containerID); request.setCloseContainer( ContainerProtos.CloseContainerRequestProto. 
getDefaultInstance()); xceiverClient.sendCommand(request.build()); } catch (IOException e) { failCount.incrementAndGet(); } }; Runnable r2 = () -> { try { ByteString data = ByteString.copyFromUtf8("hello"); ContainerProtos.ContainerCommandRequestProto.Builder writeChunkRequest = ContainerTestHelper.newWriteChunkRequestBuilder(pipeline, omKeyLocationInfo.getBlockID(), data.size()); writeChunkRequest.setWriteChunk(writeChunkRequest.getWriteChunkBuilder() .setData(data)); xceiverClient.sendCommand(writeChunkRequest.build()); latch.countDown(); } catch (IOException e) { latch.countDown(); if (!(HddsClientUtils .checkForException(e) instanceof ContainerNotOpenException)) { failCount.incrementAndGet(); } String message = e.getMessage(); assertThat(message).doesNotContain("hello"); assertThat(message).contains(HddsUtils.REDACTED.toStringUtf8()); } }; try { List<Thread> threadList = new ArrayList<>(); for (int i = 0; i < 100; i++) { count++; Thread r = new Thread(r2); r.start(); threadList.add(r); } Thread closeContainerThread = new Thread(r1); closeContainerThread.start(); threadList.add(closeContainerThread); assertTrue(latch.await(600, TimeUnit.SECONDS)); for (int i = 0; i < 101; i++) { threadList.get(i).join(); } if (failCount.get() > 0) { fail( "testWriteStateMachineDataIdempotencyWithClosedContainer " + "failed"); } assertSame( TestHelper.getDatanodeService(omKeyLocationInfo, cluster) .getDatanodeStateMachine() .getContainer().getContainerSet().getContainer(containerID) .getContainerState(), ContainerProtos.ContainerDataProto.State.CLOSED); assertTrue(stateMachine.isStateMachineHealthy()); stateMachine.takeSnapshot(); final FileInfo latestSnapshot = getSnapshotFileInfo(storage); assertNotEquals(snapshot.getPath(), latestSnapshot.getPath()); r2.run(); } finally { xceiverClientManager.releaseClient(xceiverClient, false); } } @Test void testContainerStateMachineSingleFailureRetry() throws Exception { try (OzoneOutputStream key = 
objectStore.getVolume(volumeName).getBucket(bucketName) .createKey("ratis1", 1024, ReplicationConfig.fromTypeAndFactor(ReplicationType.RATIS, ReplicationFactor.THREE), new HashMap<>())) { key.write("ratis".getBytes(UTF_8)); key.flush(); key.write("ratis".getBytes(UTF_8)); key.write("ratis".getBytes(UTF_8)); KeyOutputStream groupOutputStream = (KeyOutputStream) key. getOutputStream(); List<OmKeyLocationInfo> locationInfoList = groupOutputStream.getLocationInfoList(); assertEquals(1, locationInfoList.size()); OmKeyLocationInfo omKeyLocationInfo = locationInfoList.get(0); induceFollowerFailure(omKeyLocationInfo, 2); key.flush(); key.write("ratis".getBytes(UTF_8)); key.flush(); } validateData("ratis1", 2, "ratisratisratisratis"); } @Test void testContainerStateMachineDualFailureRetry() throws Exception { OzoneOutputStream key = objectStore.getVolume(volumeName).getBucket(bucketName) .createKey("ratis2", 1024, ReplicationConfig.fromTypeAndFactor(ReplicationType.RATIS, ReplicationFactor.THREE), new HashMap<>()); key.write("ratis".getBytes(UTF_8)); key.flush(); key.write("ratis".getBytes(UTF_8)); key.write("ratis".getBytes(UTF_8)); KeyOutputStream groupOutputStream = (KeyOutputStream) key. 
getOutputStream(); List<OmKeyLocationInfo> locationInfoList = groupOutputStream.getLocationInfoList(); assertEquals(1, locationInfoList.size()); OmKeyLocationInfo omKeyLocationInfo = locationInfoList.get(0); induceFollowerFailure(omKeyLocationInfo, 1); key.flush(); key.write("ratis".getBytes(UTF_8)); key.flush(); key.close(); validateData("ratis1", 2, "ratisratisratisratis"); } private void induceFollowerFailure(OmKeyLocationInfo omKeyLocationInfo, int failureCount) { DatanodeID leader = omKeyLocationInfo.getPipeline().getLeaderId(); Set<HddsDatanodeService> datanodeSet = TestHelper.getDatanodeServices(cluster, omKeyLocationInfo.getPipeline()); int count = 0; for (HddsDatanodeService dn : datanodeSet) { DatanodeID dnId = dn.getDatanodeDetails().getID(); if (!dnId.equals(leader)) { count++; long containerID = omKeyLocationInfo.getContainerID(); Container container = dn .getDatanodeStateMachine() .getContainer() .getContainerSet() .getContainer(containerID); if (container != null) { ContainerData containerData = container .getContainerData(); KeyValueContainerData keyValueContainerData = assertInstanceOf(KeyValueContainerData.class, containerData); FileUtil.fullyDelete(new File(keyValueContainerData.getChunksPath())); } if (count == failureCount) { break; } } } } private void validateData(String key, int locationCount, String payload) throws Exception { OmKeyArgs omKeyArgs = new OmKeyArgs.Builder() .setVolumeName(volumeName) .setBucketName(bucketName) .setKeyName(key) .build(); OmKeyInfo keyInfo = cluster.getOzoneManager().lookupKey(omKeyArgs); assertEquals(locationCount, keyInfo.getLatestVersionLocations().getLocationListCount()); byte[] buffer = new byte[Math.toIntExact(keyInfo.getDataSize())]; try (OzoneInputStream o = objectStore.getVolume(volumeName) .getBucket(bucketName).readKey(key)) { IOUtils.readFully(o, buffer); } String response = new String(buffer, StandardCharsets.UTF_8); assertEquals(payload, response); } static FileInfo 
getSnapshotFileInfo(SimpleStateMachineStorage storage) throws IOException { return StatemachineImplTestUtil.findLatestSnapshot(storage).getFile(); } }
googleapis/google-cloud-java
37,435
java-privilegedaccessmanager/google-cloud-privilegedaccessmanager/src/main/java/com/google/cloud/privilegedaccessmanager/v1/stub/GrpcPrivilegedAccessManagerStub.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.privilegedaccessmanager.v1.stub; import static com.google.cloud.privilegedaccessmanager.v1.PrivilegedAccessManagerClient.ListEntitlementsPagedResponse; import static com.google.cloud.privilegedaccessmanager.v1.PrivilegedAccessManagerClient.ListGrantsPagedResponse; import static com.google.cloud.privilegedaccessmanager.v1.PrivilegedAccessManagerClient.ListLocationsPagedResponse; import static com.google.cloud.privilegedaccessmanager.v1.PrivilegedAccessManagerClient.SearchEntitlementsPagedResponse; import static com.google.cloud.privilegedaccessmanager.v1.PrivilegedAccessManagerClient.SearchGrantsPagedResponse; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcStubCallableFactory; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.OperationCallable; import com.google.api.gax.rpc.RequestParamsBuilder; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.location.GetLocationRequest; import com.google.cloud.location.ListLocationsRequest; import com.google.cloud.location.ListLocationsResponse; import com.google.cloud.location.Location; import com.google.cloud.privilegedaccessmanager.v1.ApproveGrantRequest; import com.google.cloud.privilegedaccessmanager.v1.CheckOnboardingStatusRequest; import 
com.google.cloud.privilegedaccessmanager.v1.CheckOnboardingStatusResponse; import com.google.cloud.privilegedaccessmanager.v1.CreateEntitlementRequest; import com.google.cloud.privilegedaccessmanager.v1.CreateGrantRequest; import com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest; import com.google.cloud.privilegedaccessmanager.v1.DenyGrantRequest; import com.google.cloud.privilegedaccessmanager.v1.Entitlement; import com.google.cloud.privilegedaccessmanager.v1.GetEntitlementRequest; import com.google.cloud.privilegedaccessmanager.v1.GetGrantRequest; import com.google.cloud.privilegedaccessmanager.v1.Grant; import com.google.cloud.privilegedaccessmanager.v1.ListEntitlementsRequest; import com.google.cloud.privilegedaccessmanager.v1.ListEntitlementsResponse; import com.google.cloud.privilegedaccessmanager.v1.ListGrantsRequest; import com.google.cloud.privilegedaccessmanager.v1.ListGrantsResponse; import com.google.cloud.privilegedaccessmanager.v1.OperationMetadata; import com.google.cloud.privilegedaccessmanager.v1.RevokeGrantRequest; import com.google.cloud.privilegedaccessmanager.v1.SearchEntitlementsRequest; import com.google.cloud.privilegedaccessmanager.v1.SearchEntitlementsResponse; import com.google.cloud.privilegedaccessmanager.v1.SearchGrantsRequest; import com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse; import com.google.cloud.privilegedaccessmanager.v1.UpdateEntitlementRequest; import com.google.longrunning.Operation; import com.google.longrunning.stub.GrpcOperationsStub; import io.grpc.MethodDescriptor; import io.grpc.protobuf.ProtoUtils; import java.io.IOException; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * gRPC stub implementation for the PrivilegedAccessManager service API. * * <p>This class is for advanced usage and reflects the underlying API directly. 
*/ @Generated("by gapic-generator-java") public class GrpcPrivilegedAccessManagerStub extends PrivilegedAccessManagerStub { private static final MethodDescriptor<CheckOnboardingStatusRequest, CheckOnboardingStatusResponse> checkOnboardingStatusMethodDescriptor = MethodDescriptor.<CheckOnboardingStatusRequest, CheckOnboardingStatusResponse>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.privilegedaccessmanager.v1.PrivilegedAccessManager/CheckOnboardingStatus") .setRequestMarshaller( ProtoUtils.marshaller(CheckOnboardingStatusRequest.getDefaultInstance())) .setResponseMarshaller( ProtoUtils.marshaller(CheckOnboardingStatusResponse.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<ListEntitlementsRequest, ListEntitlementsResponse> listEntitlementsMethodDescriptor = MethodDescriptor.<ListEntitlementsRequest, ListEntitlementsResponse>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.privilegedaccessmanager.v1.PrivilegedAccessManager/ListEntitlements") .setRequestMarshaller( ProtoUtils.marshaller(ListEntitlementsRequest.getDefaultInstance())) .setResponseMarshaller( ProtoUtils.marshaller(ListEntitlementsResponse.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<SearchEntitlementsRequest, SearchEntitlementsResponse> searchEntitlementsMethodDescriptor = MethodDescriptor.<SearchEntitlementsRequest, SearchEntitlementsResponse>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.privilegedaccessmanager.v1.PrivilegedAccessManager/SearchEntitlements") .setRequestMarshaller( ProtoUtils.marshaller(SearchEntitlementsRequest.getDefaultInstance())) .setResponseMarshaller( ProtoUtils.marshaller(SearchEntitlementsResponse.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<GetEntitlementRequest, Entitlement> 
getEntitlementMethodDescriptor = MethodDescriptor.<GetEntitlementRequest, Entitlement>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.privilegedaccessmanager.v1.PrivilegedAccessManager/GetEntitlement") .setRequestMarshaller( ProtoUtils.marshaller(GetEntitlementRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Entitlement.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<CreateEntitlementRequest, Operation> createEntitlementMethodDescriptor = MethodDescriptor.<CreateEntitlementRequest, Operation>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.privilegedaccessmanager.v1.PrivilegedAccessManager/CreateEntitlement") .setRequestMarshaller( ProtoUtils.marshaller(CreateEntitlementRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<DeleteEntitlementRequest, Operation> deleteEntitlementMethodDescriptor = MethodDescriptor.<DeleteEntitlementRequest, Operation>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.privilegedaccessmanager.v1.PrivilegedAccessManager/DeleteEntitlement") .setRequestMarshaller( ProtoUtils.marshaller(DeleteEntitlementRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<UpdateEntitlementRequest, Operation> updateEntitlementMethodDescriptor = MethodDescriptor.<UpdateEntitlementRequest, Operation>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.privilegedaccessmanager.v1.PrivilegedAccessManager/UpdateEntitlement") .setRequestMarshaller( ProtoUtils.marshaller(UpdateEntitlementRequest.getDefaultInstance())) 
.setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<ListGrantsRequest, ListGrantsResponse> listGrantsMethodDescriptor = MethodDescriptor.<ListGrantsRequest, ListGrantsResponse>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.privilegedaccessmanager.v1.PrivilegedAccessManager/ListGrants") .setRequestMarshaller(ProtoUtils.marshaller(ListGrantsRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(ListGrantsResponse.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<SearchGrantsRequest, SearchGrantsResponse> searchGrantsMethodDescriptor = MethodDescriptor.<SearchGrantsRequest, SearchGrantsResponse>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.privilegedaccessmanager.v1.PrivilegedAccessManager/SearchGrants") .setRequestMarshaller(ProtoUtils.marshaller(SearchGrantsRequest.getDefaultInstance())) .setResponseMarshaller( ProtoUtils.marshaller(SearchGrantsResponse.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<GetGrantRequest, Grant> getGrantMethodDescriptor = MethodDescriptor.<GetGrantRequest, Grant>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.privilegedaccessmanager.v1.PrivilegedAccessManager/GetGrant") .setRequestMarshaller(ProtoUtils.marshaller(GetGrantRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Grant.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<CreateGrantRequest, Grant> createGrantMethodDescriptor = MethodDescriptor.<CreateGrantRequest, Grant>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.privilegedaccessmanager.v1.PrivilegedAccessManager/CreateGrant") 
.setRequestMarshaller(ProtoUtils.marshaller(CreateGrantRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Grant.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<ApproveGrantRequest, Grant> approveGrantMethodDescriptor = MethodDescriptor.<ApproveGrantRequest, Grant>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.privilegedaccessmanager.v1.PrivilegedAccessManager/ApproveGrant") .setRequestMarshaller(ProtoUtils.marshaller(ApproveGrantRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Grant.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<DenyGrantRequest, Grant> denyGrantMethodDescriptor = MethodDescriptor.<DenyGrantRequest, Grant>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.privilegedaccessmanager.v1.PrivilegedAccessManager/DenyGrant") .setRequestMarshaller(ProtoUtils.marshaller(DenyGrantRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Grant.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<RevokeGrantRequest, Operation> revokeGrantMethodDescriptor = MethodDescriptor.<RevokeGrantRequest, Operation>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.privilegedaccessmanager.v1.PrivilegedAccessManager/RevokeGrant") .setRequestMarshaller(ProtoUtils.marshaller(RevokeGrantRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<ListLocationsRequest, ListLocationsResponse> listLocationsMethodDescriptor = MethodDescriptor.<ListLocationsRequest, ListLocationsResponse>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) 
.setFullMethodName("google.cloud.location.Locations/ListLocations") .setRequestMarshaller( ProtoUtils.marshaller(ListLocationsRequest.getDefaultInstance())) .setResponseMarshaller( ProtoUtils.marshaller(ListLocationsResponse.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<GetLocationRequest, Location> getLocationMethodDescriptor = MethodDescriptor.<GetLocationRequest, Location>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.cloud.location.Locations/GetLocation") .setRequestMarshaller(ProtoUtils.marshaller(GetLocationRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Location.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private final UnaryCallable<CheckOnboardingStatusRequest, CheckOnboardingStatusResponse> checkOnboardingStatusCallable; private final UnaryCallable<ListEntitlementsRequest, ListEntitlementsResponse> listEntitlementsCallable; private final UnaryCallable<ListEntitlementsRequest, ListEntitlementsPagedResponse> listEntitlementsPagedCallable; private final UnaryCallable<SearchEntitlementsRequest, SearchEntitlementsResponse> searchEntitlementsCallable; private final UnaryCallable<SearchEntitlementsRequest, SearchEntitlementsPagedResponse> searchEntitlementsPagedCallable; private final UnaryCallable<GetEntitlementRequest, Entitlement> getEntitlementCallable; private final UnaryCallable<CreateEntitlementRequest, Operation> createEntitlementCallable; private final OperationCallable<CreateEntitlementRequest, Entitlement, OperationMetadata> createEntitlementOperationCallable; private final UnaryCallable<DeleteEntitlementRequest, Operation> deleteEntitlementCallable; private final OperationCallable<DeleteEntitlementRequest, Entitlement, OperationMetadata> deleteEntitlementOperationCallable; private final UnaryCallable<UpdateEntitlementRequest, Operation> updateEntitlementCallable; private final 
OperationCallable<UpdateEntitlementRequest, Entitlement, OperationMetadata> updateEntitlementOperationCallable; private final UnaryCallable<ListGrantsRequest, ListGrantsResponse> listGrantsCallable; private final UnaryCallable<ListGrantsRequest, ListGrantsPagedResponse> listGrantsPagedCallable; private final UnaryCallable<SearchGrantsRequest, SearchGrantsResponse> searchGrantsCallable; private final UnaryCallable<SearchGrantsRequest, SearchGrantsPagedResponse> searchGrantsPagedCallable; private final UnaryCallable<GetGrantRequest, Grant> getGrantCallable; private final UnaryCallable<CreateGrantRequest, Grant> createGrantCallable; private final UnaryCallable<ApproveGrantRequest, Grant> approveGrantCallable; private final UnaryCallable<DenyGrantRequest, Grant> denyGrantCallable; private final UnaryCallable<RevokeGrantRequest, Operation> revokeGrantCallable; private final OperationCallable<RevokeGrantRequest, Grant, OperationMetadata> revokeGrantOperationCallable; private final UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable; private final UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse> listLocationsPagedCallable; private final UnaryCallable<GetLocationRequest, Location> getLocationCallable; private final BackgroundResource backgroundResources; private final GrpcOperationsStub operationsStub; private final GrpcStubCallableFactory callableFactory; public static final GrpcPrivilegedAccessManagerStub create( PrivilegedAccessManagerStubSettings settings) throws IOException { return new GrpcPrivilegedAccessManagerStub(settings, ClientContext.create(settings)); } public static final GrpcPrivilegedAccessManagerStub create(ClientContext clientContext) throws IOException { return new GrpcPrivilegedAccessManagerStub( PrivilegedAccessManagerStubSettings.newBuilder().build(), clientContext); } public static final GrpcPrivilegedAccessManagerStub create( ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws 
IOException { return new GrpcPrivilegedAccessManagerStub( PrivilegedAccessManagerStubSettings.newBuilder().build(), clientContext, callableFactory); } /** * Constructs an instance of GrpcPrivilegedAccessManagerStub, using the given settings. This is * protected so that it is easy to make a subclass, but otherwise, the static factory methods * should be preferred. */ protected GrpcPrivilegedAccessManagerStub( PrivilegedAccessManagerStubSettings settings, ClientContext clientContext) throws IOException { this(settings, clientContext, new GrpcPrivilegedAccessManagerCallableFactory()); } /** * Constructs an instance of GrpcPrivilegedAccessManagerStub, using the given settings. This is * protected so that it is easy to make a subclass, but otherwise, the static factory methods * should be preferred. */ protected GrpcPrivilegedAccessManagerStub( PrivilegedAccessManagerStubSettings settings, ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException { this.callableFactory = callableFactory; this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory); GrpcCallSettings<CheckOnboardingStatusRequest, CheckOnboardingStatusResponse> checkOnboardingStatusTransportSettings = GrpcCallSettings .<CheckOnboardingStatusRequest, CheckOnboardingStatusResponse>newBuilder() .setMethodDescriptor(checkOnboardingStatusMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("parent", String.valueOf(request.getParent())); return builder.build(); }) .build(); GrpcCallSettings<ListEntitlementsRequest, ListEntitlementsResponse> listEntitlementsTransportSettings = GrpcCallSettings.<ListEntitlementsRequest, ListEntitlementsResponse>newBuilder() .setMethodDescriptor(listEntitlementsMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("parent", String.valueOf(request.getParent())); return builder.build(); }) 
.build(); GrpcCallSettings<SearchEntitlementsRequest, SearchEntitlementsResponse> searchEntitlementsTransportSettings = GrpcCallSettings.<SearchEntitlementsRequest, SearchEntitlementsResponse>newBuilder() .setMethodDescriptor(searchEntitlementsMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("parent", String.valueOf(request.getParent())); return builder.build(); }) .build(); GrpcCallSettings<GetEntitlementRequest, Entitlement> getEntitlementTransportSettings = GrpcCallSettings.<GetEntitlementRequest, Entitlement>newBuilder() .setMethodDescriptor(getEntitlementMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", String.valueOf(request.getName())); return builder.build(); }) .build(); GrpcCallSettings<CreateEntitlementRequest, Operation> createEntitlementTransportSettings = GrpcCallSettings.<CreateEntitlementRequest, Operation>newBuilder() .setMethodDescriptor(createEntitlementMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("parent", String.valueOf(request.getParent())); return builder.build(); }) .build(); GrpcCallSettings<DeleteEntitlementRequest, Operation> deleteEntitlementTransportSettings = GrpcCallSettings.<DeleteEntitlementRequest, Operation>newBuilder() .setMethodDescriptor(deleteEntitlementMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", String.valueOf(request.getName())); return builder.build(); }) .build(); GrpcCallSettings<UpdateEntitlementRequest, Operation> updateEntitlementTransportSettings = GrpcCallSettings.<UpdateEntitlementRequest, Operation>newBuilder() .setMethodDescriptor(updateEntitlementMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add( 
"entitlement.name", String.valueOf(request.getEntitlement().getName())); return builder.build(); }) .build(); GrpcCallSettings<ListGrantsRequest, ListGrantsResponse> listGrantsTransportSettings = GrpcCallSettings.<ListGrantsRequest, ListGrantsResponse>newBuilder() .setMethodDescriptor(listGrantsMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("parent", String.valueOf(request.getParent())); return builder.build(); }) .build(); GrpcCallSettings<SearchGrantsRequest, SearchGrantsResponse> searchGrantsTransportSettings = GrpcCallSettings.<SearchGrantsRequest, SearchGrantsResponse>newBuilder() .setMethodDescriptor(searchGrantsMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("parent", String.valueOf(request.getParent())); return builder.build(); }) .build(); GrpcCallSettings<GetGrantRequest, Grant> getGrantTransportSettings = GrpcCallSettings.<GetGrantRequest, Grant>newBuilder() .setMethodDescriptor(getGrantMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", String.valueOf(request.getName())); return builder.build(); }) .build(); GrpcCallSettings<CreateGrantRequest, Grant> createGrantTransportSettings = GrpcCallSettings.<CreateGrantRequest, Grant>newBuilder() .setMethodDescriptor(createGrantMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("parent", String.valueOf(request.getParent())); return builder.build(); }) .build(); GrpcCallSettings<ApproveGrantRequest, Grant> approveGrantTransportSettings = GrpcCallSettings.<ApproveGrantRequest, Grant>newBuilder() .setMethodDescriptor(approveGrantMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", String.valueOf(request.getName())); return 
builder.build(); }) .build(); GrpcCallSettings<DenyGrantRequest, Grant> denyGrantTransportSettings = GrpcCallSettings.<DenyGrantRequest, Grant>newBuilder() .setMethodDescriptor(denyGrantMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", String.valueOf(request.getName())); return builder.build(); }) .build(); GrpcCallSettings<RevokeGrantRequest, Operation> revokeGrantTransportSettings = GrpcCallSettings.<RevokeGrantRequest, Operation>newBuilder() .setMethodDescriptor(revokeGrantMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", String.valueOf(request.getName())); return builder.build(); }) .build(); GrpcCallSettings<ListLocationsRequest, ListLocationsResponse> listLocationsTransportSettings = GrpcCallSettings.<ListLocationsRequest, ListLocationsResponse>newBuilder() .setMethodDescriptor(listLocationsMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", String.valueOf(request.getName())); return builder.build(); }) .build(); GrpcCallSettings<GetLocationRequest, Location> getLocationTransportSettings = GrpcCallSettings.<GetLocationRequest, Location>newBuilder() .setMethodDescriptor(getLocationMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", String.valueOf(request.getName())); return builder.build(); }) .build(); this.checkOnboardingStatusCallable = callableFactory.createUnaryCallable( checkOnboardingStatusTransportSettings, settings.checkOnboardingStatusSettings(), clientContext); this.listEntitlementsCallable = callableFactory.createUnaryCallable( listEntitlementsTransportSettings, settings.listEntitlementsSettings(), clientContext); this.listEntitlementsPagedCallable = callableFactory.createPagedCallable( listEntitlementsTransportSettings, 
settings.listEntitlementsSettings(), clientContext); this.searchEntitlementsCallable = callableFactory.createUnaryCallable( searchEntitlementsTransportSettings, settings.searchEntitlementsSettings(), clientContext); this.searchEntitlementsPagedCallable = callableFactory.createPagedCallable( searchEntitlementsTransportSettings, settings.searchEntitlementsSettings(), clientContext); this.getEntitlementCallable = callableFactory.createUnaryCallable( getEntitlementTransportSettings, settings.getEntitlementSettings(), clientContext); this.createEntitlementCallable = callableFactory.createUnaryCallable( createEntitlementTransportSettings, settings.createEntitlementSettings(), clientContext); this.createEntitlementOperationCallable = callableFactory.createOperationCallable( createEntitlementTransportSettings, settings.createEntitlementOperationSettings(), clientContext, operationsStub); this.deleteEntitlementCallable = callableFactory.createUnaryCallable( deleteEntitlementTransportSettings, settings.deleteEntitlementSettings(), clientContext); this.deleteEntitlementOperationCallable = callableFactory.createOperationCallable( deleteEntitlementTransportSettings, settings.deleteEntitlementOperationSettings(), clientContext, operationsStub); this.updateEntitlementCallable = callableFactory.createUnaryCallable( updateEntitlementTransportSettings, settings.updateEntitlementSettings(), clientContext); this.updateEntitlementOperationCallable = callableFactory.createOperationCallable( updateEntitlementTransportSettings, settings.updateEntitlementOperationSettings(), clientContext, operationsStub); this.listGrantsCallable = callableFactory.createUnaryCallable( listGrantsTransportSettings, settings.listGrantsSettings(), clientContext); this.listGrantsPagedCallable = callableFactory.createPagedCallable( listGrantsTransportSettings, settings.listGrantsSettings(), clientContext); this.searchGrantsCallable = callableFactory.createUnaryCallable( searchGrantsTransportSettings, 
settings.searchGrantsSettings(), clientContext); this.searchGrantsPagedCallable = callableFactory.createPagedCallable( searchGrantsTransportSettings, settings.searchGrantsSettings(), clientContext); this.getGrantCallable = callableFactory.createUnaryCallable( getGrantTransportSettings, settings.getGrantSettings(), clientContext); this.createGrantCallable = callableFactory.createUnaryCallable( createGrantTransportSettings, settings.createGrantSettings(), clientContext); this.approveGrantCallable = callableFactory.createUnaryCallable( approveGrantTransportSettings, settings.approveGrantSettings(), clientContext); this.denyGrantCallable = callableFactory.createUnaryCallable( denyGrantTransportSettings, settings.denyGrantSettings(), clientContext); this.revokeGrantCallable = callableFactory.createUnaryCallable( revokeGrantTransportSettings, settings.revokeGrantSettings(), clientContext); this.revokeGrantOperationCallable = callableFactory.createOperationCallable( revokeGrantTransportSettings, settings.revokeGrantOperationSettings(), clientContext, operationsStub); this.listLocationsCallable = callableFactory.createUnaryCallable( listLocationsTransportSettings, settings.listLocationsSettings(), clientContext); this.listLocationsPagedCallable = callableFactory.createPagedCallable( listLocationsTransportSettings, settings.listLocationsSettings(), clientContext); this.getLocationCallable = callableFactory.createUnaryCallable( getLocationTransportSettings, settings.getLocationSettings(), clientContext); this.backgroundResources = new BackgroundResourceAggregation(clientContext.getBackgroundResources()); } public GrpcOperationsStub getOperationsStub() { return operationsStub; } @Override public UnaryCallable<CheckOnboardingStatusRequest, CheckOnboardingStatusResponse> checkOnboardingStatusCallable() { return checkOnboardingStatusCallable; } @Override public UnaryCallable<ListEntitlementsRequest, ListEntitlementsResponse> listEntitlementsCallable() { return 
listEntitlementsCallable; } @Override public UnaryCallable<ListEntitlementsRequest, ListEntitlementsPagedResponse> listEntitlementsPagedCallable() { return listEntitlementsPagedCallable; } @Override public UnaryCallable<SearchEntitlementsRequest, SearchEntitlementsResponse> searchEntitlementsCallable() { return searchEntitlementsCallable; } @Override public UnaryCallable<SearchEntitlementsRequest, SearchEntitlementsPagedResponse> searchEntitlementsPagedCallable() { return searchEntitlementsPagedCallable; } @Override public UnaryCallable<GetEntitlementRequest, Entitlement> getEntitlementCallable() { return getEntitlementCallable; } @Override public UnaryCallable<CreateEntitlementRequest, Operation> createEntitlementCallable() { return createEntitlementCallable; } @Override public OperationCallable<CreateEntitlementRequest, Entitlement, OperationMetadata> createEntitlementOperationCallable() { return createEntitlementOperationCallable; } @Override public UnaryCallable<DeleteEntitlementRequest, Operation> deleteEntitlementCallable() { return deleteEntitlementCallable; } @Override public OperationCallable<DeleteEntitlementRequest, Entitlement, OperationMetadata> deleteEntitlementOperationCallable() { return deleteEntitlementOperationCallable; } @Override public UnaryCallable<UpdateEntitlementRequest, Operation> updateEntitlementCallable() { return updateEntitlementCallable; } @Override public OperationCallable<UpdateEntitlementRequest, Entitlement, OperationMetadata> updateEntitlementOperationCallable() { return updateEntitlementOperationCallable; } @Override public UnaryCallable<ListGrantsRequest, ListGrantsResponse> listGrantsCallable() { return listGrantsCallable; } @Override public UnaryCallable<ListGrantsRequest, ListGrantsPagedResponse> listGrantsPagedCallable() { return listGrantsPagedCallable; } @Override public UnaryCallable<SearchGrantsRequest, SearchGrantsResponse> searchGrantsCallable() { return searchGrantsCallable; } @Override public 
UnaryCallable<SearchGrantsRequest, SearchGrantsPagedResponse> searchGrantsPagedCallable() { return searchGrantsPagedCallable; } @Override public UnaryCallable<GetGrantRequest, Grant> getGrantCallable() { return getGrantCallable; } @Override public UnaryCallable<CreateGrantRequest, Grant> createGrantCallable() { return createGrantCallable; } @Override public UnaryCallable<ApproveGrantRequest, Grant> approveGrantCallable() { return approveGrantCallable; } @Override public UnaryCallable<DenyGrantRequest, Grant> denyGrantCallable() { return denyGrantCallable; } @Override public UnaryCallable<RevokeGrantRequest, Operation> revokeGrantCallable() { return revokeGrantCallable; } @Override public OperationCallable<RevokeGrantRequest, Grant, OperationMetadata> revokeGrantOperationCallable() { return revokeGrantOperationCallable; } @Override public UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable() { return listLocationsCallable; } @Override public UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse> listLocationsPagedCallable() { return listLocationsPagedCallable; } @Override public UnaryCallable<GetLocationRequest, Location> getLocationCallable() { return getLocationCallable; } @Override public final void close() { try { backgroundResources.close(); } catch (RuntimeException e) { throw e; } catch (Exception e) { throw new IllegalStateException("Failed to close resource", e); } } @Override public void shutdown() { backgroundResources.shutdown(); } @Override public boolean isShutdown() { return backgroundResources.isShutdown(); } @Override public boolean isTerminated() { return backgroundResources.isTerminated(); } @Override public void shutdownNow() { backgroundResources.shutdownNow(); } @Override public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException { return backgroundResources.awaitTermination(duration, unit); } }
googleapis/google-cloud-java
37,426
java-discoveryengine/google-cloud-discoveryengine/src/main/java/com/google/cloud/discoveryengine/v1/CmekConfigServiceClient.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.discoveryengine.v1; import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.httpjson.longrunning.OperationsClient; import com.google.api.gax.longrunning.OperationFuture; import com.google.api.gax.rpc.OperationCallable; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.discoveryengine.v1.stub.CmekConfigServiceStub; import com.google.cloud.discoveryengine.v1.stub.CmekConfigServiceStubSettings; import com.google.longrunning.Operation; import com.google.protobuf.Empty; import java.io.IOException; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Service Description: Service for managing CMEK related tasks * * <p>This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (CmekConfigServiceClient cmekConfigServiceClient = CmekConfigServiceClient.create()) { * CmekConfigName name = CmekConfigName.ofProjectLocationName("[PROJECT]", "[LOCATION]"); * CmekConfig response = cmekConfigServiceClient.getCmekConfig(name); * } * }</pre> * * <p>Note: close() needs to be called on the CmekConfigServiceClient object to clean up resources * such as threads. In the example above, try-with-resources is used, which automatically calls * close(). * * <table> * <caption>Methods</caption> * <tr> * <th>Method</th> * <th>Description</th> * <th>Method Variants</th> * </tr> * <tr> * <td><p> UpdateCmekConfig</td> * <td><p> Provisions a CMEK key for use in a location of a customer's project. This method will also conduct location validation on the provided cmekConfig to make sure the key is valid and can be used in the selected location.</td> * <td> * <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p> * <ul> * <li><p> updateCmekConfigAsync(UpdateCmekConfigRequest request) * </ul> * <p>Methods that return long-running operations have "Async" method variants that return `OperationFuture`, which is used to track polling of the service.</p> * <ul> * <li><p> updateCmekConfigAsync(CmekConfig config) * </ul> * <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p> * <ul> * <li><p> updateCmekConfigOperationCallable() * <li><p> updateCmekConfigCallable() * </ul> * </td> * </tr> * <tr> * <td><p> GetCmekConfig</td> * <td><p> Gets the [CmekConfig][google.cloud.discoveryengine.v1.CmekConfig].</td> * <td> * <p>Request object method variants only take one parameter, a request object, which must be constructed before the 
call.</p> * <ul> * <li><p> getCmekConfig(GetCmekConfigRequest request) * </ul> * <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p> * <ul> * <li><p> getCmekConfig(CmekConfigName name) * <li><p> getCmekConfig(String name) * </ul> * <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p> * <ul> * <li><p> getCmekConfigCallable() * </ul> * </td> * </tr> * <tr> * <td><p> ListCmekConfigs</td> * <td><p> Lists all the [CmekConfig][google.cloud.discoveryengine.v1.CmekConfig]s with the project.</td> * <td> * <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p> * <ul> * <li><p> listCmekConfigs(ListCmekConfigsRequest request) * </ul> * <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p> * <ul> * <li><p> listCmekConfigs(LocationName parent) * <li><p> listCmekConfigs(String parent) * </ul> * <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p> * <ul> * <li><p> listCmekConfigsCallable() * </ul> * </td> * </tr> * <tr> * <td><p> DeleteCmekConfig</td> * <td><p> De-provisions a CmekConfig.</td> * <td> * <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p> * <ul> * <li><p> deleteCmekConfigAsync(DeleteCmekConfigRequest request) * </ul> * <p>Methods that return long-running operations have "Async" method variants that return `OperationFuture`, which is used to track polling of the service.</p> * <ul> * <li><p> deleteCmekConfigAsync(CmekConfigName name) * <li><p> deleteCmekConfigAsync(String name) * </ul> * <p>Callable method variants take no parameters and 
return an immutable API callable object, which can be used to initiate calls to the service.</p> * <ul> * <li><p> deleteCmekConfigOperationCallable() * <li><p> deleteCmekConfigCallable() * </ul> * </td> * </tr> * </table> * * <p>See the individual methods for example code. * * <p>Many parameters require resource names to be formatted in a particular way. To assist with * these names, this class includes a format method for each type of name, and additionally a parse * method to extract the individual identifiers contained within names that are returned. * * <p>This class can be customized by passing in a custom instance of CmekConfigServiceSettings to * create(). For example: * * <p>To customize credentials: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * CmekConfigServiceSettings cmekConfigServiceSettings = * CmekConfigServiceSettings.newBuilder() * .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials)) * .build(); * CmekConfigServiceClient cmekConfigServiceClient = * CmekConfigServiceClient.create(cmekConfigServiceSettings); * }</pre> * * <p>To customize the endpoint: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * CmekConfigServiceSettings cmekConfigServiceSettings = * CmekConfigServiceSettings.newBuilder().setEndpoint(myEndpoint).build(); * CmekConfigServiceClient cmekConfigServiceClient = * CmekConfigServiceClient.create(cmekConfigServiceSettings); * }</pre> * * <p>To use REST (HTTP1.1/JSON) transport (instead of gRPC) for sending and receiving requests over * the wire: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * CmekConfigServiceSettings cmekConfigServiceSettings = * CmekConfigServiceSettings.newHttpJsonBuilder().build(); * CmekConfigServiceClient cmekConfigServiceClient = * CmekConfigServiceClient.create(cmekConfigServiceSettings); * }</pre> * * <p>Please refer to the GitHub repository's samples for more quickstart code snippets. */ @Generated("by gapic-generator-java") public class CmekConfigServiceClient implements BackgroundResource { private final CmekConfigServiceSettings settings; private final CmekConfigServiceStub stub; private final OperationsClient httpJsonOperationsClient; private final com.google.longrunning.OperationsClient operationsClient; /** Constructs an instance of CmekConfigServiceClient with default settings. */ public static final CmekConfigServiceClient create() throws IOException { return create(CmekConfigServiceSettings.newBuilder().build()); } /** * Constructs an instance of CmekConfigServiceClient, using the given settings. 
The channels are * created based on the settings passed in, or defaults for any settings that are not set. */ public static final CmekConfigServiceClient create(CmekConfigServiceSettings settings) throws IOException { return new CmekConfigServiceClient(settings); } /** * Constructs an instance of CmekConfigServiceClient, using the given stub for making calls. This * is for advanced usage - prefer using create(CmekConfigServiceSettings). */ public static final CmekConfigServiceClient create(CmekConfigServiceStub stub) { return new CmekConfigServiceClient(stub); } /** * Constructs an instance of CmekConfigServiceClient, using the given settings. This is protected * so that it is easy to make a subclass, but otherwise, the static factory methods should be * preferred. */ protected CmekConfigServiceClient(CmekConfigServiceSettings settings) throws IOException { this.settings = settings; this.stub = ((CmekConfigServiceStubSettings) settings.getStubSettings()).createStub(); this.operationsClient = com.google.longrunning.OperationsClient.create(this.stub.getOperationsStub()); this.httpJsonOperationsClient = OperationsClient.create(this.stub.getHttpJsonOperationsStub()); } protected CmekConfigServiceClient(CmekConfigServiceStub stub) { this.settings = null; this.stub = stub; this.operationsClient = com.google.longrunning.OperationsClient.create(this.stub.getOperationsStub()); this.httpJsonOperationsClient = OperationsClient.create(this.stub.getHttpJsonOperationsStub()); } public final CmekConfigServiceSettings getSettings() { return settings; } public CmekConfigServiceStub getStub() { return stub; } /** * Returns the OperationsClient that can be used to query the status of a long-running operation * returned by another API method call. 
*/ public final com.google.longrunning.OperationsClient getOperationsClient() { return operationsClient; } /** * Returns the OperationsClient that can be used to query the status of a long-running operation * returned by another API method call. */ @BetaApi public final OperationsClient getHttpJsonOperationsClient() { return httpJsonOperationsClient; } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Provisions a CMEK key for use in a location of a customer's project. This method will also * conduct location validation on the provided cmekConfig to make sure the key is valid and can be * used in the selected location. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (CmekConfigServiceClient cmekConfigServiceClient = CmekConfigServiceClient.create()) { * CmekConfig config = CmekConfig.newBuilder().build(); * CmekConfig response = cmekConfigServiceClient.updateCmekConfigAsync(config).get(); * } * }</pre> * * @param config Required. The CmekConfig resource. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final OperationFuture<CmekConfig, UpdateCmekConfigMetadata> updateCmekConfigAsync( CmekConfig config) { UpdateCmekConfigRequest request = UpdateCmekConfigRequest.newBuilder().setConfig(config).build(); return updateCmekConfigAsync(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Provisions a CMEK key for use in a location of a customer's project. This method will also * conduct location validation on the provided cmekConfig to make sure the key is valid and can be * used in the selected location. 
* * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (CmekConfigServiceClient cmekConfigServiceClient = CmekConfigServiceClient.create()) { * UpdateCmekConfigRequest request = * UpdateCmekConfigRequest.newBuilder() * .setConfig(CmekConfig.newBuilder().build()) * .setSetDefault(true) * .build(); * CmekConfig response = cmekConfigServiceClient.updateCmekConfigAsync(request).get(); * } * }</pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final OperationFuture<CmekConfig, UpdateCmekConfigMetadata> updateCmekConfigAsync( UpdateCmekConfigRequest request) { return updateCmekConfigOperationCallable().futureCall(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Provisions a CMEK key for use in a location of a customer's project. This method will also * conduct location validation on the provided cmekConfig to make sure the key is valid and can be * used in the selected location. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (CmekConfigServiceClient cmekConfigServiceClient = CmekConfigServiceClient.create()) { * UpdateCmekConfigRequest request = * UpdateCmekConfigRequest.newBuilder() * .setConfig(CmekConfig.newBuilder().build()) * .setSetDefault(true) * .build(); * OperationFuture<CmekConfig, UpdateCmekConfigMetadata> future = * cmekConfigServiceClient.updateCmekConfigOperationCallable().futureCall(request); * // Do something. * CmekConfig response = future.get(); * } * }</pre> */ public final OperationCallable<UpdateCmekConfigRequest, CmekConfig, UpdateCmekConfigMetadata> updateCmekConfigOperationCallable() { return stub.updateCmekConfigOperationCallable(); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Provisions a CMEK key for use in a location of a customer's project. This method will also * conduct location validation on the provided cmekConfig to make sure the key is valid and can be * used in the selected location. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (CmekConfigServiceClient cmekConfigServiceClient = CmekConfigServiceClient.create()) { * UpdateCmekConfigRequest request = * UpdateCmekConfigRequest.newBuilder() * .setConfig(CmekConfig.newBuilder().build()) * .setSetDefault(true) * .build(); * ApiFuture<Operation> future = * cmekConfigServiceClient.updateCmekConfigCallable().futureCall(request); * // Do something. 
* Operation response = future.get(); * } * }</pre> */ public final UnaryCallable<UpdateCmekConfigRequest, Operation> updateCmekConfigCallable() { return stub.updateCmekConfigCallable(); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets the [CmekConfig][google.cloud.discoveryengine.v1.CmekConfig]. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (CmekConfigServiceClient cmekConfigServiceClient = CmekConfigServiceClient.create()) { * CmekConfigName name = CmekConfigName.ofProjectLocationName("[PROJECT]", "[LOCATION]"); * CmekConfig response = cmekConfigServiceClient.getCmekConfig(name); * } * }</pre> * * @param name Required. Resource name of * [CmekConfig][google.cloud.discoveryengine.v1.CmekConfig], such as * `projects/&#42;/locations/&#42;/cmekConfig` or * `projects/&#42;/locations/&#42;/cmekConfigs/&#42;`. * <p>If the caller does not have permission to access the * [CmekConfig][google.cloud.discoveryengine.v1.CmekConfig], regardless of whether or not it * exists, a PERMISSION_DENIED error is returned. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final CmekConfig getCmekConfig(CmekConfigName name) { GetCmekConfigRequest request = GetCmekConfigRequest.newBuilder().setName(name == null ? null : name.toString()).build(); return getCmekConfig(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets the [CmekConfig][google.cloud.discoveryengine.v1.CmekConfig]. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. 
* // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (CmekConfigServiceClient cmekConfigServiceClient = CmekConfigServiceClient.create()) { * String name = CmekConfigName.ofProjectLocationName("[PROJECT]", "[LOCATION]").toString(); * CmekConfig response = cmekConfigServiceClient.getCmekConfig(name); * } * }</pre> * * @param name Required. Resource name of * [CmekConfig][google.cloud.discoveryengine.v1.CmekConfig], such as * `projects/&#42;/locations/&#42;/cmekConfig` or * `projects/&#42;/locations/&#42;/cmekConfigs/&#42;`. * <p>If the caller does not have permission to access the * [CmekConfig][google.cloud.discoveryengine.v1.CmekConfig], regardless of whether or not it * exists, a PERMISSION_DENIED error is returned. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final CmekConfig getCmekConfig(String name) { GetCmekConfigRequest request = GetCmekConfigRequest.newBuilder().setName(name).build(); return getCmekConfig(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets the [CmekConfig][google.cloud.discoveryengine.v1.CmekConfig]. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (CmekConfigServiceClient cmekConfigServiceClient = CmekConfigServiceClient.create()) { * GetCmekConfigRequest request = * GetCmekConfigRequest.newBuilder() * .setName(CmekConfigName.ofProjectLocationName("[PROJECT]", "[LOCATION]").toString()) * .build(); * CmekConfig response = cmekConfigServiceClient.getCmekConfig(request); * } * }</pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final CmekConfig getCmekConfig(GetCmekConfigRequest request) { return getCmekConfigCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets the [CmekConfig][google.cloud.discoveryengine.v1.CmekConfig]. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (CmekConfigServiceClient cmekConfigServiceClient = CmekConfigServiceClient.create()) { * GetCmekConfigRequest request = * GetCmekConfigRequest.newBuilder() * .setName(CmekConfigName.ofProjectLocationName("[PROJECT]", "[LOCATION]").toString()) * .build(); * ApiFuture<CmekConfig> future = * cmekConfigServiceClient.getCmekConfigCallable().futureCall(request); * // Do something. * CmekConfig response = future.get(); * } * }</pre> */ public final UnaryCallable<GetCmekConfigRequest, CmekConfig> getCmekConfigCallable() { return stub.getCmekConfigCallable(); } // AUTO-GENERATED DOCUMENTATION AND METHOD. 
/** * Lists all the [CmekConfig][google.cloud.discoveryengine.v1.CmekConfig]s with the project. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (CmekConfigServiceClient cmekConfigServiceClient = CmekConfigServiceClient.create()) { * LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); * ListCmekConfigsResponse response = cmekConfigServiceClient.listCmekConfigs(parent); * } * }</pre> * * @param parent Required. The parent location resource name, such as * `projects/{project}/locations/{location}`. * <p>If the caller does not have permission to list * [CmekConfig][google.cloud.discoveryengine.v1.CmekConfig]s under this location, regardless * of whether or not a CmekConfig exists, a PERMISSION_DENIED error is returned. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final ListCmekConfigsResponse listCmekConfigs(LocationName parent) { ListCmekConfigsRequest request = ListCmekConfigsRequest.newBuilder() .setParent(parent == null ? null : parent.toString()) .build(); return listCmekConfigs(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists all the [CmekConfig][google.cloud.discoveryengine.v1.CmekConfig]s with the project. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (CmekConfigServiceClient cmekConfigServiceClient = CmekConfigServiceClient.create()) { * String parent = LocationName.of("[PROJECT]", "[LOCATION]").toString(); * ListCmekConfigsResponse response = cmekConfigServiceClient.listCmekConfigs(parent); * } * }</pre> * * @param parent Required. The parent location resource name, such as * `projects/{project}/locations/{location}`. * <p>If the caller does not have permission to list * [CmekConfig][google.cloud.discoveryengine.v1.CmekConfig]s under this location, regardless * of whether or not a CmekConfig exists, a PERMISSION_DENIED error is returned. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final ListCmekConfigsResponse listCmekConfigs(String parent) { ListCmekConfigsRequest request = ListCmekConfigsRequest.newBuilder().setParent(parent).build(); return listCmekConfigs(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists all the [CmekConfig][google.cloud.discoveryengine.v1.CmekConfig]s with the project. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (CmekConfigServiceClient cmekConfigServiceClient = CmekConfigServiceClient.create()) { * ListCmekConfigsRequest request = * ListCmekConfigsRequest.newBuilder() * .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString()) * .build(); * ListCmekConfigsResponse response = cmekConfigServiceClient.listCmekConfigs(request); * } * }</pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final ListCmekConfigsResponse listCmekConfigs(ListCmekConfigsRequest request) { return listCmekConfigsCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists all the [CmekConfig][google.cloud.discoveryengine.v1.CmekConfig]s with the project. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (CmekConfigServiceClient cmekConfigServiceClient = CmekConfigServiceClient.create()) { * ListCmekConfigsRequest request = * ListCmekConfigsRequest.newBuilder() * .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString()) * .build(); * ApiFuture<ListCmekConfigsResponse> future = * cmekConfigServiceClient.listCmekConfigsCallable().futureCall(request); * // Do something. 
* ListCmekConfigsResponse response = future.get(); * } * }</pre> */ public final UnaryCallable<ListCmekConfigsRequest, ListCmekConfigsResponse> listCmekConfigsCallable() { return stub.listCmekConfigsCallable(); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * De-provisions a CmekConfig. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (CmekConfigServiceClient cmekConfigServiceClient = CmekConfigServiceClient.create()) { * CmekConfigName name = * CmekConfigName.ofProjectLocationCmekConfigName( * "[PROJECT]", "[LOCATION]", "[CMEK_CONFIG]"); * cmekConfigServiceClient.deleteCmekConfigAsync(name).get(); * } * }</pre> * * @param name Required. The resource name of the * [CmekConfig][google.cloud.discoveryengine.v1.CmekConfig] to delete, such as * `projects/{project}/locations/{location}/cmekConfigs/{cmek_config}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final OperationFuture<Empty, DeleteCmekConfigMetadata> deleteCmekConfigAsync( CmekConfigName name) { DeleteCmekConfigRequest request = DeleteCmekConfigRequest.newBuilder().setName(name == null ? null : name.toString()).build(); return deleteCmekConfigAsync(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * De-provisions a CmekConfig. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (CmekConfigServiceClient cmekConfigServiceClient = CmekConfigServiceClient.create()) { * String name = CmekConfigName.ofProjectLocationName("[PROJECT]", "[LOCATION]").toString(); * cmekConfigServiceClient.deleteCmekConfigAsync(name).get(); * } * }</pre> * * @param name Required. The resource name of the * [CmekConfig][google.cloud.discoveryengine.v1.CmekConfig] to delete, such as * `projects/{project}/locations/{location}/cmekConfigs/{cmek_config}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final OperationFuture<Empty, DeleteCmekConfigMetadata> deleteCmekConfigAsync(String name) { DeleteCmekConfigRequest request = DeleteCmekConfigRequest.newBuilder().setName(name).build(); return deleteCmekConfigAsync(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * De-provisions a CmekConfig. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (CmekConfigServiceClient cmekConfigServiceClient = CmekConfigServiceClient.create()) { * DeleteCmekConfigRequest request = * DeleteCmekConfigRequest.newBuilder() * .setName( * CmekConfigName.ofProjectLocationCmekConfigName( * "[PROJECT]", "[LOCATION]", "[CMEK_CONFIG]") * .toString()) * .build(); * cmekConfigServiceClient.deleteCmekConfigAsync(request).get(); * } * }</pre> * * @param request The request object containing all of the parameters for the API call. 
* @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final OperationFuture<Empty, DeleteCmekConfigMetadata> deleteCmekConfigAsync( DeleteCmekConfigRequest request) { return deleteCmekConfigOperationCallable().futureCall(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * De-provisions a CmekConfig. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (CmekConfigServiceClient cmekConfigServiceClient = CmekConfigServiceClient.create()) { * DeleteCmekConfigRequest request = * DeleteCmekConfigRequest.newBuilder() * .setName( * CmekConfigName.ofProjectLocationCmekConfigName( * "[PROJECT]", "[LOCATION]", "[CMEK_CONFIG]") * .toString()) * .build(); * OperationFuture<Empty, DeleteCmekConfigMetadata> future = * cmekConfigServiceClient.deleteCmekConfigOperationCallable().futureCall(request); * // Do something. * future.get(); * } * }</pre> */ public final OperationCallable<DeleteCmekConfigRequest, Empty, DeleteCmekConfigMetadata> deleteCmekConfigOperationCallable() { return stub.deleteCmekConfigOperationCallable(); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * De-provisions a CmekConfig. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (CmekConfigServiceClient cmekConfigServiceClient = CmekConfigServiceClient.create()) { * DeleteCmekConfigRequest request = * DeleteCmekConfigRequest.newBuilder() * .setName( * CmekConfigName.ofProjectLocationCmekConfigName( * "[PROJECT]", "[LOCATION]", "[CMEK_CONFIG]") * .toString()) * .build(); * ApiFuture<Operation> future = * cmekConfigServiceClient.deleteCmekConfigCallable().futureCall(request); * // Do something. * future.get(); * } * }</pre> */ public final UnaryCallable<DeleteCmekConfigRequest, Operation> deleteCmekConfigCallable() { return stub.deleteCmekConfigCallable(); } @Override public final void close() { stub.close(); } @Override public void shutdown() { stub.shutdown(); } @Override public boolean isShutdown() { return stub.isShutdown(); } @Override public boolean isTerminated() { return stub.isTerminated(); } @Override public void shutdownNow() { stub.shutdownNow(); } @Override public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException { return stub.awaitTermination(duration, unit); } }
apache/lucene
37,355
lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/TestSimpleGeoPolygonRelationships.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.lucene.spatial3d.geom; import java.util.ArrayList; import java.util.Collections; import java.util.List; import org.apache.lucene.tests.util.LuceneTestCase; import org.junit.Test; /** * Check relationship between polygon and GeoShapes of basic polygons. Normally we construct the * convex, concave counterpart and the convex polygon as a complex polygon. */ public class TestSimpleGeoPolygonRelationships extends LuceneTestCase { /** Test with two shapes with no crossing edges and no points in common in convex case. 
*/ @Test public void testGeoSimplePolygon1() { // POLYGON ((19.845091 -60.452631, 20.119948 -61.655652, 23.207901 -61.453298, 22.820804 // -60.257713, 19.845091 -60.452631)) disjoint GeoPolygon originalConvexPol = buildConvexGeoPolygon( 19.84509, -60.452631, 20.119948, -61.655652, 23.207901, -61.453298, 22.820804, -60.257713); GeoPolygon originalConcavePol = buildConcaveGeoPolygon( 19.84509, -60.452631, 20.119948, -61.655652, 23.207901, -61.453298, 22.820804, -60.257713); GeoPolygon originalComplexPol = buildComplexGeoPolygon( 19.84509, -60.452631, 20.119948, -61.655652, 23.207901, -61.453298, 22.820804, -60.257713); GeoPolygon polConvex = buildConvexGeoPolygon(20.0, -60.4, 20.1, -60.4, 20.1, -60.3, 20.0, -60.3); GeoPolygon polConcave = buildConcaveGeoPolygon(20.0, -60.4, 20.1, -60.4, 20.1, -60.3, 20.0, -60.3); // Convex int rel = originalConvexPol.getRelationship(polConvex); assertEquals(GeoArea.DISJOINT, rel); rel = polConvex.getRelationship(originalConvexPol); assertEquals(GeoArea.DISJOINT, rel); rel = originalConvexPol.getRelationship(polConcave); assertEquals(GeoArea.CONTAINS, rel); rel = polConcave.getRelationship(originalConvexPol); assertEquals(GeoArea.WITHIN, rel); // Check // Concave rel = originalConcavePol.getRelationship(polConvex); assertEquals(GeoArea.WITHIN, rel); rel = polConvex.getRelationship(originalConcavePol); assertEquals(GeoArea.CONTAINS, rel); rel = originalConcavePol.getRelationship(polConcave); assertEquals(GeoArea.OVERLAPS, rel); rel = polConcave.getRelationship(originalConcavePol); assertEquals(GeoArea.OVERLAPS, rel); // Complex rel = originalComplexPol.getRelationship(polConvex); assertEquals(GeoArea.DISJOINT, rel); rel = polConvex.getRelationship(originalComplexPol); assertEquals(GeoArea.DISJOINT, rel); rel = originalComplexPol.getRelationship(polConcave); assertEquals(GeoArea.CONTAINS, rel); rel = polConcave.getRelationship(originalComplexPol); assertEquals(GeoArea.WITHIN, rel); } /** Test with two shapes with crossing edges and some 
points inside in convex case. */ @Test public void testGeoSimplePolygon2() { // POLYGON ((19.845091 -60.452631, 20.119948 -61.655652, 23.207901 -61.453298, 22.820804 // -60.257713, 19.845091 -60.452631)) disjoint GeoPolygon originalConvexPol = buildConvexGeoPolygon( 19.84509, -60.452631, 20.119948, -61.655652, 23.207901, -61.453298, 22.820804, -60.257713); GeoPolygon originalConcavePol = buildConcaveGeoPolygon( 19.84509, -60.452631, 20.119948, -61.655652, 23.207901, -61.453298, 22.820804, -60.257713); GeoPolygon originalComplexPol = buildComplexGeoPolygon( 19.84509, -60.452631, 20.119948, -61.655652, 23.207901, -61.453298, 22.820804, -60.257713); // POLYGON ((20.0 -60.4, 23.1 -60.4, 23.1 -60.3, 20.0 -60.3,20.0 -60.4)) GeoPolygon polConvex = buildConvexGeoPolygon(20.0, -60.4, 23.1, -60.4, 23.1, -60.3, 20.0, -60.3); GeoPolygon polConcave = buildConcaveGeoPolygon(20.0, -60.4, 23.1, -60.4, 23.1, -60.3, 20.0, -60.3); // Convex int rel = originalConvexPol.getRelationship(polConvex); assertEquals(GeoArea.OVERLAPS, rel); rel = polConvex.getRelationship(originalConvexPol); assertEquals(GeoArea.OVERLAPS, rel); rel = originalConvexPol.getRelationship(polConcave); assertEquals(GeoArea.OVERLAPS, rel); rel = polConcave.getRelationship(originalConvexPol); assertEquals(GeoArea.OVERLAPS, rel); // Concave rel = originalConcavePol.getRelationship(polConcave); assertEquals(GeoArea.OVERLAPS, rel); rel = polConcave.getRelationship(originalConcavePol); assertEquals(GeoArea.OVERLAPS, rel); rel = originalConcavePol.getRelationship(polConvex); assertEquals(GeoArea.OVERLAPS, rel); rel = polConvex.getRelationship(originalConcavePol); assertEquals(GeoArea.OVERLAPS, rel); // Complex rel = originalComplexPol.getRelationship(polConcave); assertEquals(GeoArea.OVERLAPS, rel); rel = polConcave.getRelationship(originalComplexPol); assertEquals(GeoArea.OVERLAPS, rel); rel = originalComplexPol.getRelationship(polConvex); assertEquals(GeoArea.OVERLAPS, rel); rel = 
polConvex.getRelationship(originalComplexPol); assertEquals(GeoArea.OVERLAPS, rel); } /** Test with two shapes with no crossing edges and all points inside in convex case. */ @Test public void testGeoSimplePolygon3() { // POLYGON ((19.845091 -60.452631, 20.119948 -61.655652, 23.207901 -61.453298, 22.820804 // -60.257713, 19.845091 -60.452631)) disjoint GeoPolygon originalConvexPol = buildConvexGeoPolygon( 19.84509, -60.452631, 20.119948, -61.655652, 23.207901, -61.453298, 22.820804, -60.257713); GeoPolygon originalConcavePol = buildConcaveGeoPolygon( 19.84509, -60.452631, 20.119948, -61.655652, 23.207901, -61.453298, 22.820804, -60.257713); GeoPolygon originalComplexPol = buildComplexGeoPolygon( 19.84509, -60.452631, 20.119948, -61.655652, 23.207901, -61.453298, 22.820804, -60.257713); // POLYGON ((20.0 -61.1, 20.1 -61.1, 20.1 -60.5, 20.0 -60.5,20.0 -61.1)) GeoPolygon polConvex = buildConvexGeoPolygon(20.0, -61.1, 20.1, -61.1, 20.1, -60.5, 20.0, -60.5); GeoPolygon polConcave = buildConcaveGeoPolygon(20.0, -61.1, 20.1, -61.1, 20.1, -60.5, 20.0, -60.5); // Convex int rel = originalConvexPol.getRelationship(polConvex); assertEquals(GeoArea.WITHIN, rel); rel = polConvex.getRelationship(originalConvexPol); assertEquals(GeoArea.CONTAINS, rel); rel = originalConvexPol.getRelationship(polConcave); assertEquals(GeoArea.OVERLAPS, rel); rel = polConcave.getRelationship(originalConvexPol); assertEquals(GeoArea.OVERLAPS, rel); // Concave rel = originalConcavePol.getRelationship(polConcave); assertEquals(GeoArea.CONTAINS, rel); rel = polConcave.getRelationship(originalConcavePol); assertEquals(GeoArea.WITHIN, rel); // check rel = originalConcavePol.getRelationship(polConvex); assertEquals(GeoArea.DISJOINT, rel); rel = polConvex.getRelationship(originalConcavePol); assertEquals(GeoArea.DISJOINT, rel); // Complex rel = originalComplexPol.getRelationship(polConvex); assertEquals(GeoArea.WITHIN, rel); rel = polConvex.getRelationship(originalComplexPol); 
assertEquals(GeoArea.CONTAINS, rel); rel = originalComplexPol.getRelationship(polConcave); assertEquals(GeoArea.OVERLAPS, rel); rel = polConcave.getRelationship(originalComplexPol); assertEquals(GeoArea.OVERLAPS, rel); } /** Test with two shapes with crossing edges and no points inside in convex case. */ @Test public void testGeoSimplePolygon4() { // POLYGON ((19.845091 -60.452631, 20.119948 -61.655652, 23.207901 -61.453298, 22.820804 // -60.257713, 19.845091 -60.452631)) disjoint GeoPolygon originalConvexPol = buildConvexGeoPolygon( 19.84509, -60.452631, 20.119948, -61.655652, 23.207901, -61.453298, 22.820804, -60.257713); GeoPolygon originalConcavePol = buildConcaveGeoPolygon( 19.84509, -60.452631, 20.119948, -61.655652, 23.207901, -61.453298, 22.820804, -60.257713); GeoPolygon originalComplexPol = buildComplexGeoPolygon( 19.84509, -60.452631, 20.119948, -61.655652, 23.207901, -61.453298, 22.820804, -60.257713); // POLYGON ((20.0 -62.4, 20.1 -62.4, 20.1 -60.3, 20.0 -60.3,20.0 -62.4)) intersects no points // inside GeoPolygon polConvex = buildConvexGeoPolygon(20.0, -62.4, 20.1, -62.4, 20.1, -60.3, 20.0, -60.3); GeoPolygon polConcave = buildConcaveGeoPolygon(20.0, -62.4, 20.1, -62.4, 20.1, -60.3, 20.0, -60.3); // Convex int rel = originalConvexPol.getRelationship(polConvex); assertEquals(GeoArea.OVERLAPS, rel); rel = polConvex.getRelationship(originalConvexPol); assertEquals(GeoArea.OVERLAPS, rel); rel = originalConvexPol.getRelationship(polConcave); assertEquals(GeoArea.OVERLAPS, rel); rel = polConcave.getRelationship(originalConvexPol); assertEquals(GeoArea.OVERLAPS, rel); // concave rel = originalConcavePol.getRelationship(polConcave); assertEquals(GeoArea.OVERLAPS, rel); rel = polConcave.getRelationship(originalConcavePol); assertEquals(GeoArea.OVERLAPS, rel); rel = originalConcavePol.getRelationship(polConvex); assertEquals(GeoArea.OVERLAPS, rel); rel = polConvex.getRelationship(originalConcavePol); assertEquals(GeoArea.OVERLAPS, rel); // Complex rel = 
originalComplexPol.getRelationship(polConvex); assertEquals(GeoArea.OVERLAPS, rel); rel = polConvex.getRelationship(originalComplexPol); assertEquals(GeoArea.OVERLAPS, rel); rel = originalComplexPol.getRelationship(polConcave); assertEquals(GeoArea.OVERLAPS, rel); rel = polConcave.getRelationship(originalComplexPol); assertEquals(GeoArea.OVERLAPS, rel); } /** Test with two shapes with no crossing edges and polygon in hole in convex case. */ @Test public void testGeoSimplePolygonWithHole1() { // POLYGON((-135 -31, -135 -30, -137 -30, -137 -31, -135 -31),(-135.5 -30.7, -135.5 -30.4, // -136.5 -30.4, -136.5 -30.7, -135.5 -30.7)) GeoPolygon hole = buildConcaveGeoPolygon(-135.5, -30.7, -135.5, -30.4, -136.5, -30.4, -136.5, -30.7); GeoPolygon originalConvexPol = buildConvexGeoPolygonWithHole(-135, -31, -135, -30, -137, -30, -137, -31, hole); GeoPolygon holeInv = buildConvexGeoPolygon(-135, -31, -135, -30, -137, -30, -137, -31); GeoPolygon originalConvexPolInv = buildConcaveGeoPolygonWithHole( -135.5, -30.7, -135.5, -30.4, -136.5, -30.4, -136.5, -30.7, holeInv); // POLYGON((-135.7 -30.6, -135.7 -30.45, -136 -30.45, -136 -30.6, -135.7 -30.6)) in the hole GeoPolygon polConvex = buildConvexGeoPolygon(-135.7, -30.6, -135.7, -30.45, -136, -30.45, -136, -30.6); GeoPolygon polConcave = buildConcaveGeoPolygon(-135.7, -30.6, -135.7, -30.45, -136, -30.45, -136, -30.6); int rel = originalConvexPol.getRelationship(polConvex); assertEquals(GeoArea.DISJOINT, rel); rel = polConvex.getRelationship(originalConvexPol); assertEquals(GeoArea.DISJOINT, rel); rel = originalConvexPol.getRelationship(polConcave); assertEquals(GeoArea.CONTAINS, rel); rel = polConcave.getRelationship(originalConvexPol); assertEquals(GeoArea.WITHIN, rel); rel = originalConvexPolInv.getRelationship(polConvex); assertEquals(GeoArea.DISJOINT, rel); rel = polConvex.getRelationship(originalConvexPolInv); assertEquals(GeoArea.DISJOINT, rel); rel = originalConvexPolInv.getRelationship(polConcave); 
assertEquals(GeoArea.CONTAINS, rel); rel = polConcave.getRelationship(originalConvexPolInv); assertEquals(GeoArea.WITHIN, rel); } /** Test with two shapes with crossing edges in hole and some points inside in convex case. */ @Test public void testGeoSimplePolygonWithHole2() { // POLYGON((-135 -31, -135 -30, -137 -30, -137 -31, -135 -31),(-135.5 -30.7, -135.5 -30.4, // -136.5 -30.4, -136.5 -30.7, -135.5 -30.7)) GeoPolygon hole = buildConcaveGeoPolygon(-135.5, -30.7, -135.5, -30.4, -136.5, -30.4, -136.5, -30.7); GeoPolygon originalConvexPol = buildConvexGeoPolygonWithHole(-135, -31, -135, -30, -137, -30, -137, -31, hole); GeoPolygon holeInv = buildConvexGeoPolygon(-135, -31, -135, -30, -137, -30, -137, -31); GeoPolygon originalConvexPolInv = buildConcaveGeoPolygonWithHole( -135.5, -30.7, -135.5, -30.4, -136.5, -30.4, -136.5, -30.7, holeInv); // POLYGON((-135.5 -31.2, -135.5 -30.8, -136 -30.8, -136 -31.2, -135.5 -31.2)) intersects the // hole GeoPolygon polConvex = buildConvexGeoPolygon(-135.5, -30.2, -135.5, -30.8, -136, -30.8, -136, -30.2); GeoPolygon polConcave = buildConcaveGeoPolygon(-135.5, -30.2, -135.5, -30.8, -136, -30.8, -136, -30.2); int rel = originalConvexPol.getRelationship(polConvex); assertEquals(GeoArea.OVERLAPS, rel); rel = polConvex.getRelationship(originalConvexPol); assertEquals(GeoArea.OVERLAPS, rel); rel = originalConvexPol.getRelationship(polConcave); assertEquals(GeoArea.OVERLAPS, rel); rel = polConcave.getRelationship(originalConvexPol); assertEquals(GeoArea.OVERLAPS, rel); rel = originalConvexPolInv.getRelationship(polConvex); assertEquals(GeoArea.OVERLAPS, rel); rel = polConvex.getRelationship(originalConvexPolInv); assertEquals(GeoArea.OVERLAPS, rel); rel = originalConvexPolInv.getRelationship(polConcave); assertEquals(GeoArea.OVERLAPS, rel); rel = polConcave.getRelationship(originalConvexPolInv); assertEquals(GeoArea.OVERLAPS, rel); } /** Test with two shapes with crossing edges and some points inside in convex case. 
*/ @Test public void testGeoSimplePolygonWithHole3() { // POLYGON((-135 -31, -135 -30, -137 -30, -137 -31, -135 -31),(-135.5 -30.7, -135.5 -30.4, // -136.5 -30.4, -136.5 -30.7, -135.5 -30.7)) GeoPolygon hole = buildConcaveGeoPolygon(-135.5, -30.7, -135.5, -30.4, -136.5, -30.4, -136.5, -30.7); GeoPolygon originalConvexPol = buildConvexGeoPolygonWithHole(-135, -31, -135, -30, -137, -30, -137, -31, hole); GeoPolygon holeInv = buildConvexGeoPolygon(-135, -31, -135, -30, -137, -30, -137, -31); GeoPolygon originalConvexPolInv = buildConcaveGeoPolygonWithHole( -135.5, -30.7, -135.5, -30.4, -136.5, -30.4, -136.5, -30.7, holeInv); // POLYGON((-135.2 -30.8, -135.2 -30.2, -136.8 -30.2, -136.8 -30.8, -135.2 -30.8)) inside the // polygon covering the hole GeoPolygon polConvex = buildConvexGeoPolygon(-135.2, -30.8, -135.2, -30.3, -136.8, -30.2, -136.8, -30.8); GeoPolygon polConcave = buildConcaveGeoPolygon(-135.2, -30.8, -135.2, -30.3, -136.8, -30.2, -136.8, -30.8); int rel = originalConvexPol.getRelationship(polConvex); assertEquals(GeoArea.OVERLAPS, rel); rel = polConvex.getRelationship(originalConvexPol); assertEquals(GeoArea.OVERLAPS, rel); rel = originalConvexPol.getRelationship(polConcave); assertEquals(GeoArea.OVERLAPS, rel); rel = polConcave.getRelationship(originalConvexPol); assertEquals(GeoArea.OVERLAPS, rel); rel = originalConvexPolInv.getRelationship(polConvex); assertEquals(GeoArea.OVERLAPS, rel); rel = polConvex.getRelationship(originalConvexPolInv); assertEquals(GeoArea.OVERLAPS, rel); rel = originalConvexPolInv.getRelationship(polConcave); assertEquals(GeoArea.OVERLAPS, rel); rel = polConcave.getRelationship(originalConvexPolInv); assertEquals(GeoArea.OVERLAPS, rel); } /** Test with two shapes with no crossing edges and all points inside in convex case. 
*/ @Test public void testGeoSimplePolygonWithHole4() { // POLYGON((-135 -31, -135 -30, -137 -30, -137 -31, -135 -31),(-135.5 -30.7, -135.5 -30.4, // -136.5 -30.4, -136.5 -30.7, -135.5 -30.7)) GeoPolygon hole = buildConcaveGeoPolygon(-135.5, -30.7, -135.5, -30.4, -136.5, -30.4, -136.5, -30.7); GeoPolygon originalConvexPol = buildConvexGeoPolygonWithHole(-135, -31, -135, -30, -137, -30, -137, -31, hole); GeoPolygon holeInv = buildConvexGeoPolygon(-135, -31, -135, -30, -137, -30, -137, -31); GeoPolygon originalConvexPolInv = buildConcaveGeoPolygonWithHole( -135.5, -30.7, -135.5, -30.4, -136.5, -30.4, -136.5, -30.7, holeInv); // POLYGON((-135.7 -30.3, -135.7 -30.2, -136 -30.2, -136 -30.3, -135.7 -30.3))inside the polygon GeoPolygon polConvex = buildConvexGeoPolygon(-135.7, -30.3, -135.7, -30.2, -136, -30.2, -136, -30.3); GeoPolygon polConcave = buildConcaveGeoPolygon(-135.7, -30.3, -135.7, -30.2, -136, -30.2, -136, -30.3); int rel = originalConvexPol.getRelationship(polConvex); assertEquals(GeoArea.WITHIN, rel); rel = polConvex.getRelationship(originalConvexPol); assertEquals(GeoArea.CONTAINS, rel); rel = originalConvexPol.getRelationship(polConcave); assertEquals(GeoArea.OVERLAPS, rel); rel = polConcave.getRelationship(originalConvexPol); assertEquals(GeoArea.OVERLAPS, rel); rel = originalConvexPolInv.getRelationship(polConvex); assertEquals(GeoArea.WITHIN, rel); rel = polConvex.getRelationship(originalConvexPolInv); assertEquals(GeoArea.CONTAINS, rel); rel = originalConvexPolInv.getRelationship(polConcave); assertEquals(GeoArea.OVERLAPS, rel); rel = polConcave.getRelationship(originalConvexPolInv); assertEquals(GeoArea.OVERLAPS, rel); } @Test public void testGeoSimplePolygonWithCircle() { // POLYGON ((19.845091 -60.452631, 20.119948 -61.655652, 23.207901 -61.453298, 22.820804 // -60.257713, 19.845091 -60.452631)) disjoint GeoPolygon originalConvexPol = buildConvexGeoPolygon( 19.84509, -60.452631, 20.119948, -61.655652, 23.207901, -61.453298, 22.820804, -60.257713); 
GeoPolygon originalConcavePol = buildConcaveGeoPolygon( 19.84509, -60.452631, 20.119948, -61.655652, 23.207901, -61.453298, 22.820804, -60.257713); GeoPolygon originalComplexPol = buildComplexGeoPolygon( 19.84509, -60.452631, 20.119948, -61.655652, 23.207901, -61.453298, 22.820804, -60.257713); GeoCircle outCircle = GeoCircleFactory.makeGeoCircle( PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-70), Geo3DUtil.fromDegrees(23), Geo3DUtil.fromDegrees(1)); int rel = originalConvexPol.getRelationship(outCircle); assertEquals(GeoArea.DISJOINT, rel); rel = originalConcavePol.getRelationship(outCircle); assertEquals(GeoArea.WITHIN, rel); rel = originalComplexPol.getRelationship(outCircle); assertEquals(GeoArea.DISJOINT, rel); GeoCircle overlapCircle = GeoCircleFactory.makeGeoCircle( PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-61.5), Geo3DUtil.fromDegrees(20), Geo3DUtil.fromDegrees(1)); rel = originalConvexPol.getRelationship(overlapCircle); assertEquals(GeoArea.OVERLAPS, rel); rel = originalConcavePol.getRelationship(overlapCircle); assertEquals(GeoArea.OVERLAPS, rel); rel = originalComplexPol.getRelationship(overlapCircle); assertEquals(GeoArea.OVERLAPS, rel); GeoCircle inCircle = GeoCircleFactory.makeGeoCircle( PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-61), Geo3DUtil.fromDegrees(21), Geo3DUtil.fromDegrees(0.1)); rel = originalConvexPol.getRelationship(inCircle); assertEquals(GeoArea.WITHIN, rel); rel = originalConcavePol.getRelationship(inCircle); assertEquals(GeoArea.DISJOINT, rel); rel = originalComplexPol.getRelationship(inCircle); assertEquals(GeoArea.WITHIN, rel); GeoCircle onCircle = GeoCircleFactory.makeGeoCircle( PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-61), Geo3DUtil.fromDegrees(21), Geo3DUtil.fromDegrees(10.)); rel = originalConvexPol.getRelationship(onCircle); assertEquals(GeoArea.CONTAINS, rel); rel = originalConcavePol.getRelationship(onCircle); assertEquals(GeoArea.OVERLAPS, rel); rel = originalComplexPol.getRelationship(onCircle); 
assertEquals(GeoArea.CONTAINS, rel); } @Test public void testGeoSimplePolygonWithBBox() { // POLYGON ((19.845091 -60.452631, 20.119948 -61.655652, 23.207901 -61.453298, 22.820804 // -60.257713, 19.845091 -60.452631)) disjoint GeoPolygon originalConvexPol = buildConvexGeoPolygon( 19.84509, -60.452631, 20.119948, -61.655652, 23.207901, -61.453298, 22.820804, -60.257713); GeoPolygon originalConcavePol = buildConcaveGeoPolygon( 19.84509, -60.452631, 20.119948, -61.655652, 23.207901, -61.453298, 22.820804, -60.257713); GeoPolygon originalComplexPol = buildComplexGeoPolygon( 19.84509, -60.452631, 20.119948, -61.655652, 23.207901, -61.453298, 22.820804, -60.257713); GeoBBox outRectangle = GeoBBoxFactory.makeGeoBBox( PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-69), Geo3DUtil.fromDegrees(-70), Geo3DUtil.fromDegrees(22), Geo3DUtil.fromDegrees(23)); int rel = originalConvexPol.getRelationship(outRectangle); assertEquals(GeoArea.DISJOINT, rel); rel = outRectangle.getRelationship(originalConvexPol); assertEquals(GeoArea.DISJOINT, rel); rel = originalConcavePol.getRelationship(outRectangle); assertEquals(GeoArea.WITHIN, rel); rel = originalComplexPol.getRelationship(outRectangle); assertEquals(GeoArea.DISJOINT, rel); GeoBBox overlapRectangle = GeoBBoxFactory.makeGeoBBox( PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-61), Geo3DUtil.fromDegrees(-62), Geo3DUtil.fromDegrees(22), Geo3DUtil.fromDegrees(23)); rel = originalConvexPol.getRelationship(overlapRectangle); assertEquals(GeoArea.OVERLAPS, rel); rel = overlapRectangle.getRelationship(originalConvexPol); assertEquals(GeoArea.OVERLAPS, rel); rel = originalConcavePol.getRelationship(overlapRectangle); assertEquals(GeoArea.OVERLAPS, rel); rel = originalComplexPol.getRelationship(overlapRectangle); assertEquals(GeoArea.OVERLAPS, rel); GeoBBox inRectangle = GeoBBoxFactory.makeGeoBBox( PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-61), Geo3DUtil.fromDegrees(-61.1), Geo3DUtil.fromDegrees(22.5), Geo3DUtil.fromDegrees(23)); rel = 
originalConvexPol.getRelationship(inRectangle); assertEquals(GeoArea.WITHIN, rel); rel = inRectangle.getRelationship(originalConvexPol); assertEquals(GeoArea.CONTAINS, rel); rel = originalConcavePol.getRelationship(inRectangle); assertEquals(GeoArea.DISJOINT, rel); rel = originalComplexPol.getRelationship(inRectangle); assertEquals(GeoArea.WITHIN, rel); GeoBBox onRectangle = GeoBBoxFactory.makeGeoBBox( PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-59), Geo3DUtil.fromDegrees(-64.1), Geo3DUtil.fromDegrees(18.5), Geo3DUtil.fromDegrees(27)); rel = originalConvexPol.getRelationship(onRectangle); assertEquals(GeoArea.CONTAINS, rel); rel = onRectangle.getRelationship(originalConvexPol); assertEquals(GeoArea.WITHIN, rel); rel = originalConcavePol.getRelationship(onRectangle); assertEquals(GeoArea.OVERLAPS, rel); rel = originalComplexPol.getRelationship(onRectangle); assertEquals(GeoArea.CONTAINS, rel); } @Test public void testGeoSimplePolygonWithComposite() { GeoShape shape = getCompositeShape(); // POLYGON((-145.8555 -5.13, -145.8540 -5.13, -145.8540 -5.12, -145.8555 -5.12, -145.8555 // -5.13)) GeoPolygon polConvex = buildConvexGeoPolygon( -145.8555, -5.13, -145.8540, -5.13, -145.8540, -5.12, -145.8555, -5.12); GeoPolygon polConcave = buildConcaveGeoPolygon( -145.8555, -5.13, -145.8540, -5.13, -145.8540, -5.12, -145.8555, -5.12); int rel = polConvex.getRelationship(shape); assertEquals(GeoArea.DISJOINT, rel); rel = polConcave.getRelationship(shape); assertEquals(GeoArea.WITHIN, rel); // POLYGON((-145.8555 -5.13, -145.85 -5.13, -145.85 -5.12, -145.8555 -5.12, -145.8555 -5.13)) polConvex = buildConvexGeoPolygon(-145.8555, -5.13, -145.85, -5.13, -145.85, -5.12, -145.8555, -5.12); polConcave = buildConcaveGeoPolygon(-145.8555, -5.13, -145.85, -5.13, -145.85, -5.12, -145.8555, -5.12); rel = polConvex.getRelationship(shape); assertEquals(GeoArea.OVERLAPS, rel); rel = polConcave.getRelationship(shape); assertEquals(GeoArea.OVERLAPS, rel); // POLYGON((-146 -5.18, -145.854 -5.18, 
-145.854 -5.11, -146 -5.11, -146 -5.18)) // Case overlaping on of the shapes polConvex = buildConvexGeoPolygon(-146, -5.18, -145.854, -5.18, -145.854, -5.11, -146, -5.11); polConcave = buildConcaveGeoPolygon(-146, -5.18, -145.854, -5.18, -145.854, -5.11, -146, -5.11); rel = polConvex.getRelationship(shape); assertEquals(GeoArea.OVERLAPS, rel); rel = polConcave.getRelationship(shape); assertEquals(GeoArea.OVERLAPS, rel); // POLYGON((-145.88 -5.13, -145.87 -5.13, -145.87 -5.12, -145.88 -5.12, -145.88 -5.13)) polConvex = buildConvexGeoPolygon(-145.88, -5.13, -145.87, -5.13, -145.87, -5.12, -145.88, -5.12); polConcave = buildConcaveGeoPolygon(-145.88, -5.13, -145.87, -5.13, -145.87, -5.12, -145.88, -5.12); rel = polConvex.getRelationship(shape); assertEquals(GeoArea.CONTAINS, rel); rel = polConcave.getRelationship(shape); assertEquals(GeoArea.OVERLAPS, rel); } @Test public void testDegeneratedPointIntersectShape() { GeoBBox bBox1 = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, 1, 0, 0, 1); GeoBBox bBox2 = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, 1, 1, 1, 1); int rel = bBox1.getRelationship(bBox2); // OVERLAPS instead of WITHIN. In this case the degenerated point lies on the edge of the shape. // intersects() returns true for one plane of the BBox and hence method return OVERLAPS. assertEquals(GeoArea.OVERLAPS, rel); rel = bBox2.getRelationship(bBox1); // The degenerated point cannot compute if it is on the edge. 
Uses WITHIN that is true // and therefore CONTAINS assertEquals(GeoArea.CONTAINS, rel); } @Test public void testDegeneratedPointInPole() { GeoBBox bBox1 = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, Math.PI * 0.5, Math.PI * 0.5, 0, 0); GeoPoint point = new GeoPoint(PlanetModel.SPHERE, Math.PI * 0.5, Math.PI); System.out.println("bbox1 = " + bBox1 + "; point = " + point); assertTrue(bBox1.isWithin(point)); } @Test public void testDegeneratePathShape() { GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, 0, 0); GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, 0, 1); GeoPoint[] pointPath1 = new GeoPoint[] {point1, point2}; GeoPath path1 = GeoPathFactory.makeGeoPath(PlanetModel.SPHERE, 0, pointPath1); GeoPath path2 = GeoPathFactory.makeGeoPath(PlanetModel.SPHERE, 1, pointPath1); System.out.println("path1 = " + path1 + " path2 = " + path2); int rel = path1.getRelationship(path2); // if an end point is inside the shape it will always return intersects assertEquals(GeoArea.CONTAINS, rel); // should be contains? 
rel = path2.getRelationship(path1); assertEquals(GeoArea.WITHIN, rel); } private GeoPolygon buildConvexGeoPolygon( double lon1, double lat1, double lon2, double lat2, double lon3, double lat3, double lon4, double lat4) { GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat1), Geo3DUtil.fromDegrees(lon1)); GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat2), Geo3DUtil.fromDegrees(lon2)); GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat3), Geo3DUtil.fromDegrees(lon3)); GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat4), Geo3DUtil.fromDegrees(lon4)); final List<GeoPoint> points = new ArrayList<>(); points.add(point1); points.add(point2); points.add(point3); points.add(point4); return GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE, points); } private GeoPolygon buildConcaveGeoPolygon( double lon1, double lat1, double lon2, double lat2, double lon3, double lat3, double lon4, double lat4) { GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat1), Geo3DUtil.fromDegrees(lon1)); GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat2), Geo3DUtil.fromDegrees(lon2)); GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat3), Geo3DUtil.fromDegrees(lon3)); GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat4), Geo3DUtil.fromDegrees(lon4)); final List<GeoPoint> points = new ArrayList<>(); points.add(point1); points.add(point2); points.add(point3); points.add(point4); return GeoPolygonFactory.makeGeoConcavePolygon(PlanetModel.SPHERE, points); } private GeoPolygon buildComplexGeoPolygon( double lon1, double lat1, double lon2, double lat2, double lon3, double lat3, double lon4, double lat4) { GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat1), Geo3DUtil.fromDegrees(lon1)); GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat2), 
Geo3DUtil.fromDegrees(lon2)); GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat3), Geo3DUtil.fromDegrees(lon3)); GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat4), Geo3DUtil.fromDegrees(lon4)); final List<GeoPoint> points = new ArrayList<>(); points.add(point1); points.add(point2); points.add(point3); points.add(point4); GeoPolygonFactory.PolygonDescription pd = new GeoPolygonFactory.PolygonDescription(points); return GeoPolygonFactory.makeLargeGeoPolygon(PlanetModel.SPHERE, Collections.singletonList(pd)); } private GeoPolygon buildConvexGeoPolygonWithHole( double lon1, double lat1, double lon2, double lat2, double lon3, double lat3, double lon4, double lat4, GeoPolygon hole) { GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat1), Geo3DUtil.fromDegrees(lon1)); GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat2), Geo3DUtil.fromDegrees(lon2)); GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat3), Geo3DUtil.fromDegrees(lon3)); GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat4), Geo3DUtil.fromDegrees(lon4)); final List<GeoPoint> points = new ArrayList<>(); points.add(point1); points.add(point2); points.add(point3); points.add(point4); // return new GeoConvexPolygon(PlanetModel.SPHERE,points, Collections.singletonList(hole)); return GeoPolygonFactory.makeGeoPolygon( PlanetModel.SPHERE, points, Collections.singletonList(hole)); } private GeoPolygon buildConcaveGeoPolygonWithHole( double lon1, double lat1, double lon2, double lat2, double lon3, double lat3, double lon4, double lat4, GeoPolygon hole) { GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat1), Geo3DUtil.fromDegrees(lon1)); GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat2), Geo3DUtil.fromDegrees(lon2)); GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat3), 
Geo3DUtil.fromDegrees(lon3)); GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat4), Geo3DUtil.fromDegrees(lon4)); final List<GeoPoint> points = new ArrayList<>(); points.add(point1); points.add(point2); points.add(point3); points.add(point4); return GeoPolygonFactory.makeGeoConcavePolygon( PlanetModel.SPHERE, points, Collections.singletonList(hole)); } private GeoShape getCompositeShape() { // MULTIPOLYGON(((-145.790967486 -5.17543698881, -145.790854979 -5.11348060995, -145.853073512 // -5.11339421216, -145.853192037 -5.17535061936, -145.790967486 -5.17543698881)), // ((-145.8563923 -5.17527125408, -145.856222168 -5.11332154814, -145.918433943 -5.11317773171, // -145.918610092 -5.17512738429, -145.8563923 -5.17527125408))) GeoPoint point1 = new GeoPoint( PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-5.17543698881), Geo3DUtil.fromDegrees(-145.790967486)); GeoPoint point2 = new GeoPoint( PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-5.11348060995), Geo3DUtil.fromDegrees(-145.790854979)); GeoPoint point3 = new GeoPoint( PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-5.11339421216), Geo3DUtil.fromDegrees(-145.853073512)); GeoPoint point4 = new GeoPoint( PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-5.17535061936), Geo3DUtil.fromDegrees(-145.853192037)); final List<GeoPoint> points1 = new ArrayList<>(); points1.add(point1); points1.add(point2); points1.add(point3); points1.add(point4); GeoPolygon pol1 = GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE, points1); GeoPoint point5 = new GeoPoint( PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-5.17527125408), Geo3DUtil.fromDegrees(-145.8563923)); GeoPoint point6 = new GeoPoint( PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-5.11332154814), Geo3DUtil.fromDegrees(-145.856222168)); GeoPoint point7 = new GeoPoint( PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-5.11317773171), Geo3DUtil.fromDegrees(-145.918433943)); GeoPoint point8 = new GeoPoint( PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-5.17512738429), 
Geo3DUtil.fromDegrees(-145.918610092)); final List<GeoPoint> points2 = new ArrayList<>(); points2.add(point5); points2.add(point6); points2.add(point7); points2.add(point8); GeoPolygon pol2 = GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE, points2); GeoCompositeMembershipShape composite = new GeoCompositeMembershipShape(PlanetModel.SPHERE); composite.addShape(pol1); composite.addShape(pol2); return composite; } }
apache/olingo-odata4
36,067
lib/server-test/src/test/java/org/apache/olingo/server/core/serializer/json/EdmAssistedJsonSerializerTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.olingo.server.core.serializer.json; import java.io.IOException; import java.math.BigDecimal; import java.math.MathContext; import java.math.RoundingMode; import java.net.URI; import java.util.Arrays; import java.util.Calendar; import java.util.Collections; import java.util.TimeZone; import java.util.UUID; import org.apache.commons.io.IOUtils; import org.apache.olingo.commons.api.data.AbstractEntityCollection; import org.apache.olingo.commons.api.data.ComplexValue; import org.apache.olingo.commons.api.data.ContextURL; import org.apache.olingo.commons.api.data.Entity; import org.apache.olingo.commons.api.data.EntityCollection; import org.apache.olingo.commons.api.data.Link; import org.apache.olingo.commons.api.data.Property; import org.apache.olingo.commons.api.data.ValueType; import org.apache.olingo.commons.api.edm.EdmEntityContainer; import org.apache.olingo.commons.api.edm.EdmEntitySet; import org.apache.olingo.commons.api.edmx.EdmxReference; import org.apache.olingo.commons.api.format.ContentType; import org.apache.olingo.server.api.OData; import org.apache.olingo.server.api.ServiceMetadata; import org.apache.olingo.server.api.serializer.EdmAssistedSerializer; import 
org.apache.olingo.server.api.serializer.EdmAssistedSerializerOptions; import org.apache.olingo.server.api.serializer.SerializerException; import org.apache.olingo.server.tecsvc.MetadataETagSupport; import org.apache.olingo.server.tecsvc.provider.EdmTechProvider; import org.junit.Assert; import org.junit.Test; public class EdmAssistedJsonSerializerTest { private static final OData oData = OData.newInstance(); private static final ServiceMetadata metadata = oData.createServiceMetadata( new EdmTechProvider(), Collections.<EdmxReference> emptyList(), null); private static final EdmEntityContainer entityContainer = metadata.getEdm().getEntityContainer(); private final EdmAssistedSerializer serializer; private final EdmAssistedSerializer serializerMin; private final EdmAssistedSerializer serializerNone; public EdmAssistedJsonSerializerTest() throws SerializerException { serializer = oData.createEdmAssistedSerializer(ContentType.JSON_FULL_METADATA); serializerMin = oData.createEdmAssistedSerializer(ContentType.JSON); serializerNone = oData.createEdmAssistedSerializer(ContentType.JSON_NO_METADATA); } @Test public void entityCollectionSimple() throws Exception { Entity entity = new Entity(); entity.setId(null); entity.addProperty(new Property(null, "Property1", ValueType.PRIMITIVE, 1.25F)); EntityCollection entityCollection = new EntityCollection(); entityCollection.getEntities().add(entity); Assert.assertEquals("{\"@odata.context\":\"$metadata#EntitySet(Property1)\"," + "\"value\":[{\"@odata.id\":null,\"Property1@odata.type\":\"#Single\",\"Property1\":1.25}]}", serialize(serializer, metadata, null, entityCollection, null)); } @Test public void entityCollectionWithEdm() throws Exception { final EdmEntitySet entitySet = entityContainer.getEntitySet("ESTwoPrim"); Entity entity = new Entity(); entity.setId(null); entity.addProperty(new Property(null, "PropertyInt16", ValueType.PRIMITIVE, (short) 1)) .addProperty(new Property(null, "PropertyString", ValueType.PRIMITIVE, 
"test")) .addProperty(new Property(null, "AdditionalProperty", ValueType.PRIMITIVE, (byte) 42)); EntityCollection entityCollection = new EntityCollection(); entityCollection.getEntities().add(entity); Assert.assertEquals("{\"@odata.context\":\"$metadata#ESTwoPrim\",\"value\":[{\"@odata.id\":null," + "\"PropertyInt16\":1,\"PropertyString\":\"test\"," + "\"AdditionalProperty@odata.type\":\"#SByte\",\"AdditionalProperty\":42}]}", serialize(serializer, metadata, entitySet, entityCollection, null)); } @Test public void entityCollection() throws Exception { Entity entity = new Entity(); entity.setId(null); entity.addProperty(new Property(null, "Property0", ValueType.PRIMITIVE, null)) .addProperty(new Property(null, "Property1", ValueType.PRIMITIVE, 1)); Calendar date = Calendar.getInstance(TimeZone.getTimeZone("GMT")); date.clear(); date.set(2000, 1, 29); entity.addProperty(new Property("Edm.Date", "Property2", ValueType.PRIMITIVE, date)) .addProperty(new Property("Edm.DateTimeOffset", "Property3", ValueType.PRIMITIVE, date)) .addProperty(new Property(null, "Property4", ValueType.COLLECTION_PRIMITIVE, Arrays.asList(true, false, null))); EntityCollection entityCollection = new EntityCollection(); entityCollection.getEntities().add(entity); entityCollection.setCount(2); entityCollection.setNext(URI.create("nextLink")); Assert.assertEquals( "{\"@odata.context\":\"$metadata#EntitySet(Property0,Property1,Property2,Property3,Property4)\"," + "\"@odata.count\":2," + "\"value\":[{\"@odata.id\":null," + "\"Property0\":null," + "\"Property1@odata.type\":\"#Int32\",\"Property1\":1," + "\"Property2@odata.type\":\"#Date\",\"Property2\":\"2000-02-29\"," + "\"Property3@odata.type\":\"#DateTimeOffset\",\"Property3\":\"2000-02-29T00:00:00Z\"," + "\"Property4@odata.type\":\"#Collection(Boolean)\",\"Property4\":[true,false,null]}]," + "\"@odata.nextLink\":\"nextLink\"}", serialize(serializer, metadata, null, entityCollection, null)); } @Test public void entityCollectionIEEE754Compatible() 
throws Exception { EntityCollection entityCollection = new EntityCollection(); entityCollection.getEntities().add(new Entity() .addProperty(new Property(null, "Property1", ValueType.PRIMITIVE, Long.MIN_VALUE)) .addProperty(new Property(null, "Property2", ValueType.PRIMITIVE, BigDecimal.valueOf(Long.MAX_VALUE, 10))) .addProperty(new Property("Edm.Byte", "Property3", ValueType.PRIMITIVE, 20))); entityCollection.setCount(3); Assert.assertEquals( "{\"@odata.context\":\"$metadata#EntitySet(Property1,Property2,Property3)\"," + "\"@odata.count\":\"3\"," + "\"value\":[{\"@odata.id\":null," + "\"Property1@odata.type\":\"#Int64\",\"Property1\":\"-9223372036854775808\"," + "\"Property2@odata.type\":\"#Decimal\",\"Property2\":\"922337203.6854775807\"," + "\"Property3@odata.type\":\"#Byte\",\"Property3\":20}]}", serialize( oData.createEdmAssistedSerializer( ContentType.create(ContentType.JSON_FULL_METADATA, ContentType.PARAMETER_IEEE754_COMPATIBLE, "true")), metadata, null, entityCollection, null)); } @Test public void entityCollectionWithComplexProperty() throws Exception { Entity entity = new Entity(); entity.setId(null); entity.addProperty(new Property(null, "Property1", ValueType.PRIMITIVE, 1L)); ComplexValue complexValue = new ComplexValue(); complexValue.getValue().add(new Property(null, "Inner1", ValueType.PRIMITIVE, BigDecimal.TEN.scaleByPowerOfTen(-5))); Calendar time = Calendar.getInstance(TimeZone.getTimeZone("GMT")); time.clear(); time.set(Calendar.HOUR_OF_DAY, 13); time.set(Calendar.SECOND, 59); time.set(Calendar.MILLISECOND, 999); complexValue.getValue().add(new Property("Edm.TimeOfDay", "Inner2", ValueType.PRIMITIVE, time)); entity.addProperty(new Property("Namespace.ComplexType", "Property2", ValueType.COMPLEX, complexValue)); EntityCollection entityCollection = new EntityCollection(); entityCollection.getEntities().add(entity); Assert.assertEquals("{\"@odata.context\":\"$metadata#EntitySet(Property1,Property2)\"," + "\"value\":[{\"@odata.id\":null," + 
"\"Property1@odata.type\":\"#Int64\",\"Property1\":1," + "\"Property2\":{\"@odata.type\":\"#Namespace.ComplexType\"," + "\"Inner1@odata.type\":\"#Decimal\",\"Inner1\":0.00010," + "\"Inner2@odata.type\":\"#TimeOfDay\",\"Inner2\":\"13:00:59.999\"}}]}", serialize(serializer, metadata, null, entityCollection, null)); } @Test public void entityCollectionWithComplexCollection() throws Exception { final EdmEntitySet entitySet = entityContainer.getEntitySet("ESMixPrimCollComp"); ComplexValue complexValue1 = new ComplexValue(); complexValue1.getValue().add(new Property(null, "PropertyInt16", ValueType.PRIMITIVE, 1)); complexValue1.getValue().add(new Property(null, "PropertyString", ValueType.PRIMITIVE, "one")); ComplexValue complexValue2 = new ComplexValue(); complexValue2.getValue().add(new Property(null, "PropertyInt16", ValueType.PRIMITIVE, 2)); complexValue2.getValue().add(new Property(null, "PropertyString", ValueType.PRIMITIVE, "two")); ComplexValue complexValue3 = new ComplexValue(); complexValue3.getValue().add(new Property(null, "PropertyInt16", ValueType.PRIMITIVE, 3)); complexValue3.getValue().add(new Property(null, "PropertyString", ValueType.PRIMITIVE, "three")); EntityCollection entityCollection = new EntityCollection(); entityCollection.getEntities().add(new Entity() .addProperty(new Property(null, "CollPropertyComp", ValueType.COLLECTION_COMPLEX, Arrays.asList(complexValue1, complexValue2, complexValue3)))); Assert.assertEquals("{\"@odata.context\":\"$metadata#ESMixPrimCollComp(CollPropertyComp)\"," + "\"value\":[{\"@odata.id\":null," + "\"CollPropertyComp\":[" + "{\"PropertyInt16\":1,\"PropertyString\":\"one\"}," + "{\"PropertyInt16\":2,\"PropertyString\":\"two\"}," + "{\"PropertyInt16\":3,\"PropertyString\":\"three\"}]}]}", serialize(serializer, metadata, entitySet, entityCollection, "CollPropertyComp")); } @Test public void entityCollectionWithEmptyCollection() throws Exception { final EdmEntitySet entitySet = 
entityContainer.getEntitySet("ESMixPrimCollComp"); EntityCollection entityCollection = new EntityCollection(); entityCollection.getEntities().add(new Entity() .addProperty(new Property(null, "CollPropertyString", ValueType.COLLECTION_PRIMITIVE, Collections.emptyList()))); Assert.assertEquals( "{\"@odata.context\":\"$metadata#ESMixPrimCollComp(CollPropertyString)\"," + "\"value\":[{\"@odata.id\":null,\"CollPropertyString\":[]}]}", serialize(serializer, metadata, entitySet, entityCollection, "CollPropertyString")); } @Test public void expand() throws Exception { final Entity relatedEntity1 = new Entity().addProperty(new Property(null, "Related1", ValueType.PRIMITIVE, 1.5)); final Entity relatedEntity2 = new Entity().addProperty(new Property(null, "Related1", ValueType.PRIMITIVE, 2.75)); EntityCollection target = new EntityCollection(); target.getEntities().add(relatedEntity1); target.getEntities().add(relatedEntity2); Link link = new Link(); link.setTitle("NavigationProperty"); link.setInlineEntitySet(target); Entity entity = new Entity(); entity.setId(null); entity.addProperty(new Property(null, "Property1", ValueType.PRIMITIVE, (short) 1)); entity.getNavigationLinks().add(link); EntityCollection entityCollection = new EntityCollection(); entityCollection.getEntities().add(entity); Assert.assertEquals("{\"@odata.context\":\"$metadata#EntitySet(Property1,NavigationProperty(Related1))\"," + "\"value\":[{\"@odata.id\":null," + "\"Property1@odata.type\":\"#Int16\",\"Property1\":1," + "\"NavigationProperty\":[" + "{\"@odata.id\":null,\"Related1@odata.type\":\"#Double\",\"Related1\":1.5}," + "{\"@odata.id\":null,\"Related1@odata.type\":\"#Double\",\"Related1\":2.75}]}]}", serialize(serializer, metadata, null, entityCollection, "Property1,NavigationProperty(Related1)")); } @Test public void expandWithEdm() throws Exception { final EdmEntitySet entitySet = entityContainer.getEntitySet("ESTwoPrim"); Entity entity = new Entity() .addProperty(new Property(null, 
"PropertyInt16", ValueType.PRIMITIVE, (short) 42)) .addProperty(new Property(null, "PropertyString", ValueType.PRIMITIVE, "test")); final Entity target = new Entity() .addProperty(new Property(null, "PropertyInt16", ValueType.PRIMITIVE, (short) 2)) .addProperty(new Property(null, "PropertyByte", ValueType.PRIMITIVE, 3L)); Link link = new Link(); link.setTitle("NavPropertyETAllPrimOne"); link.setInlineEntity(target); entity.getNavigationLinks().add(link); EntityCollection entityCollection = new EntityCollection(); entityCollection.getEntities().add(entity); Assert.assertEquals("{\"@odata.context\":\"$metadata#ESTwoPrim\",\"value\":[{\"@odata.id\":null," + "\"PropertyInt16\":42,\"PropertyString\":\"test\"," + "\"NavPropertyETAllPrimOne\":{\"@odata.id\":null,\"PropertyInt16\":2,\"PropertyByte\":3}}]}", serialize(serializer, metadata, entitySet, entityCollection, null)); } @Test public void metadata() throws Exception { final ServiceMetadata metadata = oData.createServiceMetadata(null, Collections.<EdmxReference> emptyList(), new MetadataETagSupport("W/\"42\"")); Entity entity = new Entity(); entity.setType("Namespace.EntityType"); entity.setId(URI.create("ID")); entity.setETag("W/\"1000\""); Link link = new Link(); link.setHref("editLink"); entity.setEditLink(link); entity.setMediaContentSource(URI.create("media")); entity.addProperty(new Property(null, "Property1", ValueType.PRIMITIVE, UUID.fromString("12345678-ABCD-1234-CDEF-123456789012"))); EntityCollection entityCollection = new EntityCollection(); entityCollection.getEntities().add(entity); Assert.assertEquals("{\"@odata.context\":\"$metadata#EntitySet(Property1)\"," + "\"@odata.metadataEtag\":\"W/\\\"42\\\"\",\"value\":[{" + "\"@odata.etag\":\"W/\\\"1000\\\"\"," + "\"@odata.type\":\"#Namespace.EntityType\"," + "\"@odata.id\":\"ID\"," + "\"Property1@odata.type\":\"#Guid\",\"Property1\":\"12345678-abcd-1234-cdef-123456789012\"," + "\"@odata.editLink\":\"editLink\"," + 
"\"@odata.mediaReadLink\":\"editLink/$value\"}]}", serialize(serializer, metadata, null, entityCollection, null)); Assert.assertEquals("{\"value\":[{\"Property1\":\"12345678-abcd-1234-cdef-123456789012\"}]}", serialize(oData.createEdmAssistedSerializer(ContentType.JSON_NO_METADATA), metadata, null, entityCollection, null)); } @Test(expected = SerializerException.class) public void enumType() throws Exception { EntityCollection entityCollection = new EntityCollection(); entityCollection.getEntities().add( new Entity().addProperty(new Property(null, "Property1", ValueType.ENUM, 42))); serializer.entityCollection(metadata, null, entityCollection, null); } @Test(expected = SerializerException.class) public void collectionEnumType() throws Exception { EntityCollection entityCollection = new EntityCollection(); entityCollection.getEntities().add( new Entity().addProperty(new Property(null, "Property1", ValueType.COLLECTION_ENUM, Arrays.asList(42)))); serializer.entityCollection(metadata, null, entityCollection, null); } @Test(expected = SerializerException.class) public void geoType() throws Exception { EntityCollection entityCollection = new EntityCollection(); entityCollection.getEntities().add( new Entity().addProperty(new Property(null, "Property1", ValueType.GEOSPATIAL, 1))); serializer.entityCollection(metadata, null, entityCollection, null); } @Test(expected = SerializerException.class) public void unsupportedType() throws Exception { EntityCollection entityCollection = new EntityCollection(); entityCollection.getEntities().add( new Entity().addProperty(new Property(null, "Property1", ValueType.PRIMITIVE, TimeZone.getDefault()))); serializer.entityCollection(metadata, null, entityCollection, null); } @Test(expected = SerializerException.class) public void wrongValueForType() throws Exception { EntityCollection entityCollection = new EntityCollection(); entityCollection.getEntities().add( new Entity().addProperty(new Property("Edm.SByte", "Property1", 
ValueType.PRIMITIVE, "-1"))); serializer.entityCollection(metadata, null, entityCollection, null); } @Test(expected = SerializerException.class) public void wrongValueForPropertyFacet() throws Exception { EntityCollection entityCollection = new EntityCollection(); entityCollection.getEntities().add( new Entity().addProperty( new Property(null, "PropertyDecimal", ValueType.PRIMITIVE, BigDecimal.ONE.scaleByPowerOfTen(-11)))); serializer.entityCollection(metadata, entityContainer.getEntitySet("ESAllPrim").getEntityType(), entityCollection, null); } @Test(expected = SerializerException.class) public void wrongValueForPropertyFacetInComplexProperty() throws Exception { ComplexValue innerComplexValue = new ComplexValue(); innerComplexValue.getValue().add(new Property(null, "PropertyDecimal", ValueType.PRIMITIVE, BigDecimal.ONE.scaleByPowerOfTen(-6))); ComplexValue complexValue = new ComplexValue(); complexValue.getValue().add(new Property(null, "PropertyComp", ValueType.COMPLEX, innerComplexValue)); EntityCollection entityCollection = new EntityCollection(); entityCollection.getEntities().add( new Entity().addProperty( new Property(null, "CollPropertyComp", ValueType.COLLECTION_COMPLEX, Collections.singletonList(complexValue)))); serializer.entityCollection(metadata, entityContainer.getEntitySet("ESKeyNav").getEntityType(), entityCollection, null); } private String serialize(final EdmAssistedSerializer serializer, final ServiceMetadata metadata, final EdmEntitySet edmEntitySet, final AbstractEntityCollection entityCollection, final String selectList) throws SerializerException, IOException { ContextURL.Builder contextURLBuilder = ContextURL.with(); contextURLBuilder = edmEntitySet == null ? 
contextURLBuilder.entitySetOrSingletonOrType("EntitySet") : contextURLBuilder.entitySet(edmEntitySet); if (selectList == null) { if (edmEntitySet == null) { StringBuilder names = new StringBuilder(); for (final Property property : entityCollection.iterator().next().getProperties()) { names.append(names.length() > 0 ? ',' : "").append(property.getName()); } contextURLBuilder = contextURLBuilder.selectList(names.toString()); } } else { contextURLBuilder = contextURLBuilder.selectList(selectList); } return IOUtils.toString( serializer.entityCollection(metadata, edmEntitySet == null ? null : edmEntitySet.getEntityType(), entityCollection, EdmAssistedSerializerOptions.with().contextURL(contextURLBuilder.build()).build()) .getContent()); } @Test public void entityCollectionSimpleMetadataMin() throws Exception { Entity entity = new Entity(); entity.setId(null); entity.addProperty(new Property(null, "Property1", ValueType.PRIMITIVE, 1.25F)); EntityCollection entityCollection = new EntityCollection(); entityCollection.getEntities().add(entity); Assert.assertEquals("{\"@odata.context\":\"$metadata#EntitySet(Property1)\"," + "\"value\":[{\"Property1\":1.25}]}", serialize(serializerMin, metadata, null, entityCollection, null)); } @Test public void entityCollectionSimpleMetadataNone() throws Exception { Entity entity = new Entity(); entity.setId(null); entity.addProperty(new Property(null, "Property1", ValueType.PRIMITIVE, 1.25F)); EntityCollection entityCollection = new EntityCollection(); entityCollection.getEntities().add(entity); Assert.assertEquals("{\"value\":[{\"Property1\":1.25}]}", serialize(serializerNone, metadata, null, entityCollection, null)); } @Test public void entityCollectionMetadataMin() throws Exception { Entity entity = new Entity(); entity.setId(null); entity.addProperty(new Property(null, "Property0", ValueType.PRIMITIVE, null)) .addProperty(new Property(null, "Property1", ValueType.PRIMITIVE, 1)); Calendar date = 
Calendar.getInstance(TimeZone.getTimeZone("GMT")); date.clear(); date.set(2000, 1, 29); entity.addProperty(new Property("Edm.Date", "Property2", ValueType.PRIMITIVE, date)) .addProperty(new Property("Edm.DateTimeOffset", "Property3", ValueType.PRIMITIVE, date)) .addProperty(new Property(null, "Property4", ValueType.COLLECTION_PRIMITIVE, Arrays.asList(true, false, null))); EntityCollection entityCollection = new EntityCollection(); entityCollection.getEntities().add(entity); entityCollection.setCount(2); entityCollection.setNext(URI.create("nextLink")); Assert.assertEquals( "{\"@odata.context\":\"$metadata#EntitySet(Property0,Property1,Property2,Property3,Property4)\"," + "\"@odata.count\":2," + "\"value\":[{" + "\"Property0\":null," + "\"Property1\":1," + "\"Property2\":\"2000-02-29\"," + "\"Property3\":\"2000-02-29T00:00:00Z\"," + "\"Property4\":[true,false,null]}]," + "\"@odata.nextLink\":\"nextLink\"}", serialize(serializerMin, metadata, null, entityCollection, null)); } @Test public void entityCollectionMetadataNone() throws Exception { Entity entity = new Entity(); entity.setId(null); entity.addProperty(new Property(null, "Property0", ValueType.PRIMITIVE, null)) .addProperty(new Property(null, "Property1", ValueType.PRIMITIVE, 1)); Calendar date = Calendar.getInstance(TimeZone.getTimeZone("GMT")); date.clear(); date.set(2000, 1, 29); entity.addProperty(new Property("Edm.Date", "Property2", ValueType.PRIMITIVE, date)) .addProperty(new Property("Edm.DateTimeOffset", "Property3", ValueType.PRIMITIVE, date)) .addProperty(new Property(null, "Property4", ValueType.COLLECTION_PRIMITIVE, Arrays.asList(true, false, null))); EntityCollection entityCollection = new EntityCollection(); entityCollection.getEntities().add(entity); entityCollection.setCount(2); entityCollection.setNext(URI.create("nextLink")); Assert.assertEquals( "{" + "\"@odata.count\":2," + "\"value\":[{" + "\"Property0\":null," + "\"Property1\":1," + "\"Property2\":\"2000-02-29\"," + 
"\"Property3\":\"2000-02-29T00:00:00Z\"," + "\"Property4\":[true,false,null]}]," + "\"@odata.nextLink\":\"nextLink\"}", serialize(serializerNone, metadata, null, entityCollection, null)); } @Test public void entityCollectionWithComplexPropertyMetadataMin() throws Exception { Entity entity = new Entity(); entity.setId(null); entity.addProperty(new Property(null, "Property1", ValueType.PRIMITIVE, 1L)); ComplexValue complexValue = new ComplexValue(); complexValue.getValue().add(new Property(null, "Inner1", ValueType.PRIMITIVE, BigDecimal.TEN.scaleByPowerOfTen(-5))); Calendar time = Calendar.getInstance(TimeZone.getTimeZone("GMT")); time.clear(); time.set(Calendar.HOUR_OF_DAY, 13); time.set(Calendar.SECOND, 59); time.set(Calendar.MILLISECOND, 999); complexValue.getValue().add(new Property("Edm.TimeOfDay", "Inner2", ValueType.PRIMITIVE, time)); entity.addProperty(new Property("Namespace.ComplexType", "Property2", ValueType.COMPLEX, complexValue)); EntityCollection entityCollection = new EntityCollection(); entityCollection.getEntities().add(entity); Assert.assertEquals("{\"@odata.context\":\"$metadata#EntitySet(Property1,Property2)\"," + "\"value\":[{" + "\"Property1\":1," + "\"Property2\":{" + "\"Inner1\":0.00010," + "\"Inner2\":\"13:00:59.999\"}}]}", serialize(serializerMin, metadata, null, entityCollection, null)); } @Test public void entityCollectionWithComplexPropertyMetadataNone() throws Exception { Entity entity = new Entity(); entity.setId(null); entity.addProperty(new Property(null, "Property1", ValueType.PRIMITIVE, 1L)); ComplexValue complexValue = new ComplexValue(); complexValue.getValue().add(new Property(null, "Inner1", ValueType.PRIMITIVE, BigDecimal.TEN.scaleByPowerOfTen(-5))); Calendar time = Calendar.getInstance(TimeZone.getTimeZone("GMT")); time.clear(); time.set(Calendar.HOUR_OF_DAY, 13); time.set(Calendar.SECOND, 59); time.set(Calendar.MILLISECOND, 999); complexValue.getValue().add(new Property("Edm.TimeOfDay", "Inner2", ValueType.PRIMITIVE, time)); 
entity.addProperty(new Property("Namespace.ComplexType", "Property2", ValueType.COMPLEX, complexValue)); EntityCollection entityCollection = new EntityCollection(); entityCollection.getEntities().add(entity); Assert.assertEquals("{" + "\"value\":[{" + "\"Property1\":1," + "\"Property2\":{" + "\"Inner1\":0.00010," + "\"Inner2\":\"13:00:59.999\"}}]}", serialize(serializerNone, metadata, null, entityCollection, null)); } @Test public void entityCollectionWithComplexCollectionMin() throws Exception { final EdmEntitySet entitySet = entityContainer.getEntitySet("ESMixPrimCollComp"); ComplexValue complexValue1 = new ComplexValue(); complexValue1.getValue().add(new Property(null, "PropertyInt16", ValueType.PRIMITIVE, 1)); complexValue1.getValue().add(new Property(null, "PropertyString", ValueType.PRIMITIVE, "one")); ComplexValue complexValue2 = new ComplexValue(); complexValue2.getValue().add(new Property(null, "PropertyInt16", ValueType.PRIMITIVE, 2)); complexValue2.getValue().add(new Property(null, "PropertyString", ValueType.PRIMITIVE, "two")); ComplexValue complexValue3 = new ComplexValue(); complexValue3.getValue().add(new Property(null, "PropertyInt16", ValueType.PRIMITIVE, 3)); complexValue3.getValue().add(new Property(null, "PropertyString", ValueType.PRIMITIVE, "three")); EntityCollection entityCollection = new EntityCollection(); entityCollection.getEntities().add(new Entity() .addProperty(new Property(null, "CollPropertyComp", ValueType.COLLECTION_COMPLEX, Arrays.asList(complexValue1, complexValue2, complexValue3)))); Assert.assertEquals("{\"@odata.context\":\"$metadata#ESMixPrimCollComp(CollPropertyComp)\"," + "\"value\":[{" + "\"CollPropertyComp\":[" + "{\"PropertyInt16\":1,\"PropertyString\":\"one\"}," + "{\"PropertyInt16\":2,\"PropertyString\":\"two\"}," + "{\"PropertyInt16\":3,\"PropertyString\":\"three\"}]}]}", serialize(serializerMin, metadata, entitySet, entityCollection, "CollPropertyComp")); } @Test public void 
entityCollectionWithComplexCollectionNone() throws Exception { final EdmEntitySet entitySet = entityContainer.getEntitySet("ESMixPrimCollComp"); ComplexValue complexValue1 = new ComplexValue(); complexValue1.getValue().add(new Property(null, "PropertyInt16", ValueType.PRIMITIVE, 1)); complexValue1.getValue().add(new Property(null, "PropertyString", ValueType.PRIMITIVE, "one")); ComplexValue complexValue2 = new ComplexValue(); complexValue2.getValue().add(new Property(null, "PropertyInt16", ValueType.PRIMITIVE, 2)); complexValue2.getValue().add(new Property(null, "PropertyString", ValueType.PRIMITIVE, "two")); ComplexValue complexValue3 = new ComplexValue(); complexValue3.getValue().add(new Property(null, "PropertyInt16", ValueType.PRIMITIVE, 3)); complexValue3.getValue().add(new Property(null, "PropertyString", ValueType.PRIMITIVE, "three")); EntityCollection entityCollection = new EntityCollection(); entityCollection.getEntities().add(new Entity() .addProperty(new Property(null, "CollPropertyComp", ValueType.COLLECTION_COMPLEX, Arrays.asList(complexValue1, complexValue2, complexValue3)))); Assert.assertEquals("{" + "\"value\":[{" + "\"CollPropertyComp\":[" + "{\"PropertyInt16\":1,\"PropertyString\":\"one\"}," + "{\"PropertyInt16\":2,\"PropertyString\":\"two\"}," + "{\"PropertyInt16\":3,\"PropertyString\":\"three\"}]}]}", serialize(serializerNone, metadata, entitySet, entityCollection, "CollPropertyComp")); } @Test public void entityCollectionWithEmptyCollectionMin() throws Exception { final EdmEntitySet entitySet = entityContainer.getEntitySet("ESMixPrimCollComp"); EntityCollection entityCollection = new EntityCollection(); entityCollection.getEntities().add(new Entity() .addProperty(new Property(null, "CollPropertyString", ValueType.COLLECTION_PRIMITIVE, Collections.emptyList()))); Assert.assertEquals( "{\"@odata.context\":\"$metadata#ESMixPrimCollComp(CollPropertyString)\"," + "\"value\":[{\"CollPropertyString\":[]}]}", serialize(serializerMin, metadata, 
entitySet, entityCollection, "CollPropertyString")); } @Test public void entityCollectionWithEmptyCollectionNone() throws Exception { final EdmEntitySet entitySet = entityContainer.getEntitySet("ESMixPrimCollComp"); EntityCollection entityCollection = new EntityCollection(); entityCollection.getEntities().add(new Entity() .addProperty(new Property(null, "CollPropertyString", ValueType.COLLECTION_PRIMITIVE, Collections.emptyList()))); Assert.assertEquals( "{" + "\"value\":[{\"CollPropertyString\":[]}]}", serialize(serializerNone, metadata, entitySet, entityCollection, "CollPropertyString")); } @Test public void expandMetadataMin() throws Exception { final Entity relatedEntity1 = new Entity().addProperty(new Property(null, "Related1", ValueType.PRIMITIVE, 1.5)); final Entity relatedEntity2 = new Entity().addProperty(new Property(null, "Related1", ValueType.PRIMITIVE, 2.75)); EntityCollection target = new EntityCollection(); target.getEntities().add(relatedEntity1); target.getEntities().add(relatedEntity2); Link link = new Link(); link.setTitle("NavigationProperty"); link.setInlineEntitySet(target); Entity entity = new Entity(); entity.setId(null); entity.addProperty(new Property(null, "Property1", ValueType.PRIMITIVE, (short) 1)); entity.getNavigationLinks().add(link); EntityCollection entityCollection = new EntityCollection(); entityCollection.getEntities().add(entity); Assert.assertEquals("{\"@odata.context\":\"$metadata#EntitySet(Property1,NavigationProperty(Related1))\"," + "\"value\":[{" + "\"Property1\":1," + "\"NavigationProperty\":[" + "{\"Related1\":1.5}," + "{\"Related1\":2.75}]}]}", serialize(serializerMin, metadata, null, entityCollection, "Property1,NavigationProperty(Related1)")); } @Test public void expandMetadataNone() throws Exception { final Entity relatedEntity1 = new Entity().addProperty(new Property(null, "Related1", ValueType.PRIMITIVE, 1.5)); final Entity relatedEntity2 = new Entity().addProperty(new Property(null, "Related1", 
ValueType.PRIMITIVE, 2.75)); EntityCollection target = new EntityCollection(); target.getEntities().add(relatedEntity1); target.getEntities().add(relatedEntity2); Link link = new Link(); link.setTitle("NavigationProperty"); link.setInlineEntitySet(target); Entity entity = new Entity(); entity.setId(null); entity.addProperty(new Property(null, "Property1", ValueType.PRIMITIVE, (short) 1)); entity.getNavigationLinks().add(link); EntityCollection entityCollection = new EntityCollection(); entityCollection.getEntities().add(entity); Assert.assertEquals("{" + "\"value\":[{" + "\"Property1\":1," + "\"NavigationProperty\":[" + "{\"Related1\":1.5}," + "{\"Related1\":2.75}]}]}", serialize(serializerNone, metadata, null, entityCollection, "Property1,NavigationProperty(Related1)")); } @Test public void metadataMin() throws Exception { final ServiceMetadata metadata = oData.createServiceMetadata(null, Collections.<EdmxReference> emptyList(), new MetadataETagSupport("W/\"42\"")); Entity entity = new Entity(); entity.setType("Namespace.EntityType"); entity.setId(URI.create("ID")); entity.setETag("W/\"1000\""); Link link = new Link(); link.setHref("editLink"); entity.setEditLink(link); entity.setMediaContentSource(URI.create("media")); entity.addProperty(new Property(null, "Property1", ValueType.PRIMITIVE, UUID.fromString("12345678-ABCD-1234-CDEF-123456789012"))); EntityCollection entityCollection = new EntityCollection(); entityCollection.getEntities().add(entity); Assert.assertEquals("{\"@odata.context\":\"$metadata#EntitySet(Property1)\"," + "\"@odata.metadataEtag\":\"W/\\\"42\\\"\",\"value\":[{" + "\"@odata.etag\":\"W/\\\"1000\\\"\"," + "\"Property1\":\"12345678-abcd-1234-cdef-123456789012\"," + "\"@odata.editLink\":\"editLink\"," + "\"@odata.mediaReadLink\":\"editLink/$value\"}]}", serialize(serializerMin, metadata, null, entityCollection, null)); } @Test public void entityCollectionWithBigDecimalProperty() throws Exception { EntityCollection entityCollection = new 
EntityCollection(); BigDecimal b = new BigDecimal(1.666666666666666666666666666666667); b.abs(new MathContext(0, RoundingMode.UNNECESSARY)); entityCollection.getEntities().add(new Entity() .addProperty(new Property(null, "Property1", ValueType.PRIMITIVE, b))); Assert.assertTrue( serialize(serializerMin, metadata, null, entityCollection, null) .contains("1.6666666666666667406815349750104360282421112060546875")); } @Test public void entityCollectionWithBigDecimalPropertyIntegerInScientificNotation() throws Exception { EntityCollection entityCollection = new EntityCollection(); BigDecimal b = new BigDecimal("1.52E+4"); entityCollection.getEntities().add(new Entity() .addProperty(new Property(null, "Property1", ValueType.PRIMITIVE, b))); Assert.assertTrue( serialize(serializerMin, metadata, null, entityCollection, null) .contains("15200")); } @Test public void entityCollectionWithBigDecimalPropertyInScientificNotation() throws Exception { EntityCollection entityCollection = new EntityCollection(); BigDecimal b = new BigDecimal("1.52123123E+4"); entityCollection.getEntities().add(new Entity() .addProperty(new Property(null, "Property1", ValueType.PRIMITIVE, b))); Assert.assertTrue( serialize(serializerMin, metadata, null, entityCollection, null) .contains("15212.3123")); } }
google/java-photoslibrary
37,536
photoslibraryapi/src/main/java/com/google/photos/library/v1/internal/stub/GrpcPhotosLibraryStub.java
/* * Copyright 2022 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.photos.library.v1.internal.stub; import static com.google.photos.library.v1.internal.InternalPhotosLibraryClient.ListAlbumsPagedResponse; import static com.google.photos.library.v1.internal.InternalPhotosLibraryClient.ListMediaItemsPagedResponse; import static com.google.photos.library.v1.internal.InternalPhotosLibraryClient.ListSharedAlbumsPagedResponse; import static com.google.photos.library.v1.internal.InternalPhotosLibraryClient.SearchMediaItemsPagedResponse; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcStubCallableFactory; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.UnaryCallable; import com.google.common.collect.ImmutableMap; import com.google.longrunning.stub.GrpcOperationsStub; import com.google.photos.library.v1.proto.AddEnrichmentToAlbumRequest; import com.google.photos.library.v1.proto.AddEnrichmentToAlbumResponse; import com.google.photos.library.v1.proto.BatchAddMediaItemsToAlbumRequest; import com.google.photos.library.v1.proto.BatchAddMediaItemsToAlbumResponse; import com.google.photos.library.v1.proto.BatchCreateMediaItemsRequest; import com.google.photos.library.v1.proto.BatchCreateMediaItemsResponse; import com.google.photos.library.v1.proto.BatchGetMediaItemsRequest; import 
com.google.photos.library.v1.proto.BatchGetMediaItemsResponse; import com.google.photos.library.v1.proto.BatchRemoveMediaItemsFromAlbumRequest; import com.google.photos.library.v1.proto.BatchRemoveMediaItemsFromAlbumResponse; import com.google.photos.library.v1.proto.CreateAlbumRequest; import com.google.photos.library.v1.proto.GetAlbumRequest; import com.google.photos.library.v1.proto.GetMediaItemRequest; import com.google.photos.library.v1.proto.GetSharedAlbumRequest; import com.google.photos.library.v1.proto.JoinSharedAlbumRequest; import com.google.photos.library.v1.proto.JoinSharedAlbumResponse; import com.google.photos.library.v1.proto.LeaveSharedAlbumRequest; import com.google.photos.library.v1.proto.LeaveSharedAlbumResponse; import com.google.photos.library.v1.proto.ListAlbumsRequest; import com.google.photos.library.v1.proto.ListAlbumsResponse; import com.google.photos.library.v1.proto.ListMediaItemsRequest; import com.google.photos.library.v1.proto.ListMediaItemsResponse; import com.google.photos.library.v1.proto.ListSharedAlbumsRequest; import com.google.photos.library.v1.proto.ListSharedAlbumsResponse; import com.google.photos.library.v1.proto.SearchMediaItemsRequest; import com.google.photos.library.v1.proto.SearchMediaItemsResponse; import com.google.photos.library.v1.proto.ShareAlbumRequest; import com.google.photos.library.v1.proto.ShareAlbumResponse; import com.google.photos.library.v1.proto.UnshareAlbumRequest; import com.google.photos.library.v1.proto.UnshareAlbumResponse; import com.google.photos.library.v1.proto.UpdateAlbumRequest; import com.google.photos.library.v1.proto.UpdateMediaItemRequest; import com.google.photos.types.proto.Album; import com.google.photos.types.proto.MediaItem; import io.grpc.MethodDescriptor; import io.grpc.protobuf.ProtoUtils; import java.io.IOException; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. 
// NOTE(review): auto-generated by gapic-generator-java — manual edits will be lost on regeneration.
/**
 * gRPC stub implementation for the PhotosLibrary service API.
 *
 * <p>This class is for advanced usage and reflects the underlying API directly.
 */
@Generated("by gapic-generator-java")
public class GrpcPhotosLibraryStub extends PhotosLibraryStub {

  // ---------------------------------------------------------------------------
  // Static gRPC MethodDescriptors — one per RPC. Every method here is UNARY;
  // each descriptor binds the fully-qualified method name to protobuf
  // marshallers for its request/response message types.
  // ---------------------------------------------------------------------------

  private static final MethodDescriptor<CreateAlbumRequest, Album> createAlbumMethodDescriptor =
      MethodDescriptor.<CreateAlbumRequest, Album>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.photos.library.v1.PhotosLibrary/CreateAlbum")
          .setRequestMarshaller(ProtoUtils.marshaller(CreateAlbumRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Album.getDefaultInstance()))
          .build();

  private static final MethodDescriptor<BatchCreateMediaItemsRequest, BatchCreateMediaItemsResponse>
      batchCreateMediaItemsMethodDescriptor =
          MethodDescriptor.<BatchCreateMediaItemsRequest, BatchCreateMediaItemsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.photos.library.v1.PhotosLibrary/BatchCreateMediaItems")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(BatchCreateMediaItemsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(BatchCreateMediaItemsResponse.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<
          BatchAddMediaItemsToAlbumRequest, BatchAddMediaItemsToAlbumResponse>
      batchAddMediaItemsToAlbumMethodDescriptor =
          MethodDescriptor
              .<BatchAddMediaItemsToAlbumRequest, BatchAddMediaItemsToAlbumResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.photos.library.v1.PhotosLibrary/BatchAddMediaItemsToAlbum")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(BatchAddMediaItemsToAlbumRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(BatchAddMediaItemsToAlbumResponse.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<SearchMediaItemsRequest, SearchMediaItemsResponse>
      searchMediaItemsMethodDescriptor =
          MethodDescriptor.<SearchMediaItemsRequest, SearchMediaItemsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.photos.library.v1.PhotosLibrary/SearchMediaItems")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(SearchMediaItemsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(SearchMediaItemsResponse.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<ListMediaItemsRequest, ListMediaItemsResponse>
      listMediaItemsMethodDescriptor =
          MethodDescriptor.<ListMediaItemsRequest, ListMediaItemsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.photos.library.v1.PhotosLibrary/ListMediaItems")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(ListMediaItemsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(ListMediaItemsResponse.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<GetMediaItemRequest, MediaItem>
      getMediaItemMethodDescriptor =
          MethodDescriptor.<GetMediaItemRequest, MediaItem>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.photos.library.v1.PhotosLibrary/GetMediaItem")
              .setRequestMarshaller(ProtoUtils.marshaller(GetMediaItemRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(MediaItem.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<BatchGetMediaItemsRequest, BatchGetMediaItemsResponse>
      batchGetMediaItemsMethodDescriptor =
          MethodDescriptor.<BatchGetMediaItemsRequest, BatchGetMediaItemsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.photos.library.v1.PhotosLibrary/BatchGetMediaItems")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(BatchGetMediaItemsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(BatchGetMediaItemsResponse.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<ListAlbumsRequest, ListAlbumsResponse>
      listAlbumsMethodDescriptor =
          MethodDescriptor.<ListAlbumsRequest, ListAlbumsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.photos.library.v1.PhotosLibrary/ListAlbums")
              .setRequestMarshaller(ProtoUtils.marshaller(ListAlbumsRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(ListAlbumsResponse.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<GetAlbumRequest, Album> getAlbumMethodDescriptor =
      MethodDescriptor.<GetAlbumRequest, Album>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.photos.library.v1.PhotosLibrary/GetAlbum")
          .setRequestMarshaller(ProtoUtils.marshaller(GetAlbumRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Album.getDefaultInstance()))
          .build();

  private static final MethodDescriptor<GetSharedAlbumRequest, Album>
      getSharedAlbumMethodDescriptor =
          MethodDescriptor.<GetSharedAlbumRequest, Album>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.photos.library.v1.PhotosLibrary/GetSharedAlbum")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(GetSharedAlbumRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Album.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<AddEnrichmentToAlbumRequest, AddEnrichmentToAlbumResponse>
      addEnrichmentToAlbumMethodDescriptor =
          MethodDescriptor.<AddEnrichmentToAlbumRequest, AddEnrichmentToAlbumResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.photos.library.v1.PhotosLibrary/AddEnrichmentToAlbum")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(AddEnrichmentToAlbumRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(AddEnrichmentToAlbumResponse.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<JoinSharedAlbumRequest, JoinSharedAlbumResponse>
      joinSharedAlbumMethodDescriptor =
          MethodDescriptor.<JoinSharedAlbumRequest, JoinSharedAlbumResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.photos.library.v1.PhotosLibrary/JoinSharedAlbum")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(JoinSharedAlbumRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(JoinSharedAlbumResponse.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<LeaveSharedAlbumRequest, LeaveSharedAlbumResponse>
      leaveSharedAlbumMethodDescriptor =
          MethodDescriptor.<LeaveSharedAlbumRequest, LeaveSharedAlbumResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.photos.library.v1.PhotosLibrary/LeaveSharedAlbum")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(LeaveSharedAlbumRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(LeaveSharedAlbumResponse.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<ShareAlbumRequest, ShareAlbumResponse>
      shareAlbumMethodDescriptor =
          MethodDescriptor.<ShareAlbumRequest, ShareAlbumResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.photos.library.v1.PhotosLibrary/ShareAlbum")
              .setRequestMarshaller(ProtoUtils.marshaller(ShareAlbumRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(ShareAlbumResponse.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<ListSharedAlbumsRequest, ListSharedAlbumsResponse>
      listSharedAlbumsMethodDescriptor =
          MethodDescriptor.<ListSharedAlbumsRequest, ListSharedAlbumsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.photos.library.v1.PhotosLibrary/ListSharedAlbums")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(ListSharedAlbumsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(ListSharedAlbumsResponse.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<UnshareAlbumRequest, UnshareAlbumResponse>
      unshareAlbumMethodDescriptor =
          MethodDescriptor.<UnshareAlbumRequest, UnshareAlbumResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.photos.library.v1.PhotosLibrary/UnshareAlbum")
              .setRequestMarshaller(ProtoUtils.marshaller(UnshareAlbumRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(UnshareAlbumResponse.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<
          BatchRemoveMediaItemsFromAlbumRequest, BatchRemoveMediaItemsFromAlbumResponse>
      batchRemoveMediaItemsFromAlbumMethodDescriptor =
          MethodDescriptor
              .<BatchRemoveMediaItemsFromAlbumRequest, BatchRemoveMediaItemsFromAlbumResponse>
                  newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.photos.library.v1.PhotosLibrary/BatchRemoveMediaItemsFromAlbum")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(BatchRemoveMediaItemsFromAlbumRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(
                      BatchRemoveMediaItemsFromAlbumResponse.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<UpdateAlbumRequest, Album> updateAlbumMethodDescriptor =
      MethodDescriptor.<UpdateAlbumRequest, Album>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.photos.library.v1.PhotosLibrary/UpdateAlbum")
          .setRequestMarshaller(ProtoUtils.marshaller(UpdateAlbumRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Album.getDefaultInstance()))
          .build();

  private static final MethodDescriptor<UpdateMediaItemRequest, MediaItem>
      updateMediaItemMethodDescriptor =
          MethodDescriptor.<UpdateMediaItemRequest, MediaItem>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.photos.library.v1.PhotosLibrary/UpdateMediaItem")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(UpdateMediaItemRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(MediaItem.getDefaultInstance()))
              .build();

  // ---------------------------------------------------------------------------
  // Callable fields — one UnaryCallable per RPC, plus a "Paged" variant for the
  // four list/search RPCs. All are wired up once in the constructor.
  // ---------------------------------------------------------------------------

  private final UnaryCallable<CreateAlbumRequest, Album> createAlbumCallable;
  private final UnaryCallable<BatchCreateMediaItemsRequest, BatchCreateMediaItemsResponse>
      batchCreateMediaItemsCallable;
  private final UnaryCallable<BatchAddMediaItemsToAlbumRequest, BatchAddMediaItemsToAlbumResponse>
      batchAddMediaItemsToAlbumCallable;
  private final UnaryCallable<SearchMediaItemsRequest, SearchMediaItemsResponse>
      searchMediaItemsCallable;
  private final UnaryCallable<SearchMediaItemsRequest, SearchMediaItemsPagedResponse>
      searchMediaItemsPagedCallable;
  private final UnaryCallable<ListMediaItemsRequest, ListMediaItemsResponse> listMediaItemsCallable;
  private final UnaryCallable<ListMediaItemsRequest, ListMediaItemsPagedResponse>
      listMediaItemsPagedCallable;
  private final UnaryCallable<GetMediaItemRequest, MediaItem> getMediaItemCallable;
  private final UnaryCallable<BatchGetMediaItemsRequest, BatchGetMediaItemsResponse>
      batchGetMediaItemsCallable;
  private final UnaryCallable<ListAlbumsRequest, ListAlbumsResponse> listAlbumsCallable;
  private final UnaryCallable<ListAlbumsRequest, ListAlbumsPagedResponse> listAlbumsPagedCallable;
  private final UnaryCallable<GetAlbumRequest, Album> getAlbumCallable;
  private final UnaryCallable<GetSharedAlbumRequest, Album> getSharedAlbumCallable;
  private final UnaryCallable<AddEnrichmentToAlbumRequest, AddEnrichmentToAlbumResponse>
      addEnrichmentToAlbumCallable;
  private final UnaryCallable<JoinSharedAlbumRequest, JoinSharedAlbumResponse>
      joinSharedAlbumCallable;
  private final UnaryCallable<LeaveSharedAlbumRequest, LeaveSharedAlbumResponse>
      leaveSharedAlbumCallable;
  private final UnaryCallable<ShareAlbumRequest, ShareAlbumResponse> shareAlbumCallable;
  private final UnaryCallable<ListSharedAlbumsRequest, ListSharedAlbumsResponse>
      listSharedAlbumsCallable;
  private final UnaryCallable<ListSharedAlbumsRequest, ListSharedAlbumsPagedResponse>
      listSharedAlbumsPagedCallable;
  private final UnaryCallable<UnshareAlbumRequest, UnshareAlbumResponse> unshareAlbumCallable;
  private final UnaryCallable<
          BatchRemoveMediaItemsFromAlbumRequest, BatchRemoveMediaItemsFromAlbumResponse>
      batchRemoveMediaItemsFromAlbumCallable;
  private final UnaryCallable<UpdateAlbumRequest, Album> updateAlbumCallable;
  private final UnaryCallable<UpdateMediaItemRequest, MediaItem> updateMediaItemCallable;

  private final BackgroundResource backgroundResources;
  private final GrpcOperationsStub operationsStub;
  private final GrpcStubCallableFactory callableFactory;

  // --- Static factory methods (preferred over the protected constructors). ---

  public static final GrpcPhotosLibraryStub create(PhotosLibraryStubSettings settings)
      throws IOException {
    return new GrpcPhotosLibraryStub(settings, ClientContext.create(settings));
  }

  public static final GrpcPhotosLibraryStub create(ClientContext clientContext) throws IOException {
    return new GrpcPhotosLibraryStub(PhotosLibraryStubSettings.newBuilder().build(), clientContext);
  }

  public static final GrpcPhotosLibraryStub create(
      ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException {
    return new GrpcPhotosLibraryStub(
        PhotosLibraryStubSettings.newBuilder().build(), clientContext, callableFactory);
  }

  /**
   * Constructs an instance of GrpcPhotosLibraryStub, using the given settings. This is protected so
   * that it is easy to make a subclass, but otherwise, the static factory methods should be
   * preferred.
   */
  protected GrpcPhotosLibraryStub(PhotosLibraryStubSettings settings, ClientContext clientContext)
      throws IOException {
    this(settings, clientContext, new GrpcPhotosLibraryCallableFactory());
  }

  /**
   * Constructs an instance of GrpcPhotosLibraryStub, using the given settings. This is protected so
   * that it is easy to make a subclass, but otherwise, the static factory methods should be
   * preferred.
   */
  protected GrpcPhotosLibraryStub(
      PhotosLibraryStubSettings settings,
      ClientContext clientContext,
      GrpcStubCallableFactory callableFactory)
      throws IOException {
    this.callableFactory = callableFactory;
    this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory);

    // Per-RPC transport settings. Where a setParamsExtractor is present, it pulls
    // a request field (album_id / media_item_id / share_token / album.id /
    // media_item.id) into a param map attached to each call.
    GrpcCallSettings<CreateAlbumRequest, Album> createAlbumTransportSettings =
        GrpcCallSettings.<CreateAlbumRequest, Album>newBuilder()
            .setMethodDescriptor(createAlbumMethodDescriptor)
            .build();
    GrpcCallSettings<BatchCreateMediaItemsRequest, BatchCreateMediaItemsResponse>
        batchCreateMediaItemsTransportSettings =
            GrpcCallSettings
                .<BatchCreateMediaItemsRequest, BatchCreateMediaItemsResponse>newBuilder()
                .setMethodDescriptor(batchCreateMediaItemsMethodDescriptor)
                .build();
    GrpcCallSettings<BatchAddMediaItemsToAlbumRequest, BatchAddMediaItemsToAlbumResponse>
        batchAddMediaItemsToAlbumTransportSettings =
            GrpcCallSettings
                .<BatchAddMediaItemsToAlbumRequest, BatchAddMediaItemsToAlbumResponse>newBuilder()
                .setMethodDescriptor(batchAddMediaItemsToAlbumMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                      params.put("album_id", String.valueOf(request.getAlbumId()));
                      return params.build();
                    })
                .build();
    GrpcCallSettings<SearchMediaItemsRequest, SearchMediaItemsResponse>
        searchMediaItemsTransportSettings =
            GrpcCallSettings.<SearchMediaItemsRequest, SearchMediaItemsResponse>newBuilder()
                .setMethodDescriptor(searchMediaItemsMethodDescriptor)
                .build();
    GrpcCallSettings<ListMediaItemsRequest, ListMediaItemsResponse>
        listMediaItemsTransportSettings =
            GrpcCallSettings.<ListMediaItemsRequest, ListMediaItemsResponse>newBuilder()
                .setMethodDescriptor(listMediaItemsMethodDescriptor)
                .build();
    GrpcCallSettings<GetMediaItemRequest, MediaItem> getMediaItemTransportSettings =
        GrpcCallSettings.<GetMediaItemRequest, MediaItem>newBuilder()
            .setMethodDescriptor(getMediaItemMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                  params.put("media_item_id", String.valueOf(request.getMediaItemId()));
                  return params.build();
                })
            .build();
    GrpcCallSettings<BatchGetMediaItemsRequest, BatchGetMediaItemsResponse>
        batchGetMediaItemsTransportSettings =
            GrpcCallSettings.<BatchGetMediaItemsRequest, BatchGetMediaItemsResponse>newBuilder()
                .setMethodDescriptor(batchGetMediaItemsMethodDescriptor)
                .build();
    GrpcCallSettings<ListAlbumsRequest, ListAlbumsResponse> listAlbumsTransportSettings =
        GrpcCallSettings.<ListAlbumsRequest, ListAlbumsResponse>newBuilder()
            .setMethodDescriptor(listAlbumsMethodDescriptor)
            .build();
    GrpcCallSettings<GetAlbumRequest, Album> getAlbumTransportSettings =
        GrpcCallSettings.<GetAlbumRequest, Album>newBuilder()
            .setMethodDescriptor(getAlbumMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                  params.put("album_id", String.valueOf(request.getAlbumId()));
                  return params.build();
                })
            .build();
    GrpcCallSettings<GetSharedAlbumRequest, Album> getSharedAlbumTransportSettings =
        GrpcCallSettings.<GetSharedAlbumRequest, Album>newBuilder()
            .setMethodDescriptor(getSharedAlbumMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                  params.put("share_token", String.valueOf(request.getShareToken()));
                  return params.build();
                })
            .build();
    GrpcCallSettings<AddEnrichmentToAlbumRequest, AddEnrichmentToAlbumResponse>
        addEnrichmentToAlbumTransportSettings =
            GrpcCallSettings.<AddEnrichmentToAlbumRequest, AddEnrichmentToAlbumResponse>newBuilder()
                .setMethodDescriptor(addEnrichmentToAlbumMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                      params.put("album_id", String.valueOf(request.getAlbumId()));
                      return params.build();
                    })
                .build();
    GrpcCallSettings<JoinSharedAlbumRequest, JoinSharedAlbumResponse>
        joinSharedAlbumTransportSettings =
            GrpcCallSettings.<JoinSharedAlbumRequest, JoinSharedAlbumResponse>newBuilder()
                .setMethodDescriptor(joinSharedAlbumMethodDescriptor)
                .build();
    GrpcCallSettings<LeaveSharedAlbumRequest, LeaveSharedAlbumResponse>
        leaveSharedAlbumTransportSettings =
            GrpcCallSettings.<LeaveSharedAlbumRequest, LeaveSharedAlbumResponse>newBuilder()
                .setMethodDescriptor(leaveSharedAlbumMethodDescriptor)
                .build();
    GrpcCallSettings<ShareAlbumRequest, ShareAlbumResponse> shareAlbumTransportSettings =
        GrpcCallSettings.<ShareAlbumRequest, ShareAlbumResponse>newBuilder()
            .setMethodDescriptor(shareAlbumMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                  params.put("album_id", String.valueOf(request.getAlbumId()));
                  return params.build();
                })
            .build();
    GrpcCallSettings<ListSharedAlbumsRequest, ListSharedAlbumsResponse>
        listSharedAlbumsTransportSettings =
            GrpcCallSettings.<ListSharedAlbumsRequest, ListSharedAlbumsResponse>newBuilder()
                .setMethodDescriptor(listSharedAlbumsMethodDescriptor)
                .build();
    GrpcCallSettings<UnshareAlbumRequest, UnshareAlbumResponse> unshareAlbumTransportSettings =
        GrpcCallSettings.<UnshareAlbumRequest, UnshareAlbumResponse>newBuilder()
            .setMethodDescriptor(unshareAlbumMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                  params.put("album_id", String.valueOf(request.getAlbumId()));
                  return params.build();
                })
            .build();
    GrpcCallSettings<BatchRemoveMediaItemsFromAlbumRequest, BatchRemoveMediaItemsFromAlbumResponse>
        batchRemoveMediaItemsFromAlbumTransportSettings =
            GrpcCallSettings
                .<BatchRemoveMediaItemsFromAlbumRequest, BatchRemoveMediaItemsFromAlbumResponse>
                    newBuilder()
                .setMethodDescriptor(batchRemoveMediaItemsFromAlbumMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                      params.put("album_id", String.valueOf(request.getAlbumId()));
                      return params.build();
                    })
                .build();
    GrpcCallSettings<UpdateAlbumRequest, Album> updateAlbumTransportSettings =
        GrpcCallSettings.<UpdateAlbumRequest, Album>newBuilder()
            .setMethodDescriptor(updateAlbumMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                  params.put("album.id", String.valueOf(request.getAlbum().getId()));
                  return params.build();
                })
            .build();
    GrpcCallSettings<UpdateMediaItemRequest, MediaItem> updateMediaItemTransportSettings =
        GrpcCallSettings.<UpdateMediaItemRequest, MediaItem>newBuilder()
            .setMethodDescriptor(updateMediaItemMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                  params.put("media_item.id", String.valueOf(request.getMediaItem().getId()));
                  return params.build();
                })
            .build();

    // Instantiate the callables from the transport settings + the per-method
    // retry/timeout settings carried by PhotosLibraryStubSettings. Note the
    // paged variants reuse the same transport settings as their unary siblings.
    this.createAlbumCallable =
        callableFactory.createUnaryCallable(
            createAlbumTransportSettings, settings.createAlbumSettings(), clientContext);
    this.batchCreateMediaItemsCallable =
        callableFactory.createUnaryCallable(
            batchCreateMediaItemsTransportSettings,
            settings.batchCreateMediaItemsSettings(),
            clientContext);
    this.batchAddMediaItemsToAlbumCallable =
        callableFactory.createUnaryCallable(
            batchAddMediaItemsToAlbumTransportSettings,
            settings.batchAddMediaItemsToAlbumSettings(),
            clientContext);
    this.searchMediaItemsCallable =
        callableFactory.createUnaryCallable(
            searchMediaItemsTransportSettings, settings.searchMediaItemsSettings(), clientContext);
    this.searchMediaItemsPagedCallable =
        callableFactory.createPagedCallable(
            searchMediaItemsTransportSettings, settings.searchMediaItemsSettings(), clientContext);
    this.listMediaItemsCallable =
        callableFactory.createUnaryCallable(
            listMediaItemsTransportSettings, settings.listMediaItemsSettings(), clientContext);
    this.listMediaItemsPagedCallable =
        callableFactory.createPagedCallable(
            listMediaItemsTransportSettings, settings.listMediaItemsSettings(), clientContext);
    this.getMediaItemCallable =
        callableFactory.createUnaryCallable(
            getMediaItemTransportSettings, settings.getMediaItemSettings(), clientContext);
    this.batchGetMediaItemsCallable =
        callableFactory.createUnaryCallable(
            batchGetMediaItemsTransportSettings,
            settings.batchGetMediaItemsSettings(),
            clientContext);
    this.listAlbumsCallable =
        callableFactory.createUnaryCallable(
            listAlbumsTransportSettings, settings.listAlbumsSettings(), clientContext);
    this.listAlbumsPagedCallable =
        callableFactory.createPagedCallable(
            listAlbumsTransportSettings, settings.listAlbumsSettings(), clientContext);
    this.getAlbumCallable =
        callableFactory.createUnaryCallable(
            getAlbumTransportSettings, settings.getAlbumSettings(), clientContext);
    this.getSharedAlbumCallable =
        callableFactory.createUnaryCallable(
            getSharedAlbumTransportSettings, settings.getSharedAlbumSettings(), clientContext);
    this.addEnrichmentToAlbumCallable =
        callableFactory.createUnaryCallable(
            addEnrichmentToAlbumTransportSettings,
            settings.addEnrichmentToAlbumSettings(),
            clientContext);
    this.joinSharedAlbumCallable =
        callableFactory.createUnaryCallable(
            joinSharedAlbumTransportSettings, settings.joinSharedAlbumSettings(), clientContext);
    this.leaveSharedAlbumCallable =
        callableFactory.createUnaryCallable(
            leaveSharedAlbumTransportSettings, settings.leaveSharedAlbumSettings(), clientContext);
    this.shareAlbumCallable =
        callableFactory.createUnaryCallable(
            shareAlbumTransportSettings, settings.shareAlbumSettings(), clientContext);
    this.listSharedAlbumsCallable =
        callableFactory.createUnaryCallable(
            listSharedAlbumsTransportSettings, settings.listSharedAlbumsSettings(), clientContext);
    this.listSharedAlbumsPagedCallable =
        callableFactory.createPagedCallable(
            listSharedAlbumsTransportSettings, settings.listSharedAlbumsSettings(), clientContext);
    this.unshareAlbumCallable =
        callableFactory.createUnaryCallable(
            unshareAlbumTransportSettings, settings.unshareAlbumSettings(), clientContext);
    this.batchRemoveMediaItemsFromAlbumCallable =
        callableFactory.createUnaryCallable(
            batchRemoveMediaItemsFromAlbumTransportSettings,
            settings.batchRemoveMediaItemsFromAlbumSettings(),
            clientContext);
    this.updateAlbumCallable =
        callableFactory.createUnaryCallable(
            updateAlbumTransportSettings, settings.updateAlbumSettings(), clientContext);
    this.updateMediaItemCallable =
        callableFactory.createUnaryCallable(
            updateMediaItemTransportSettings, settings.updateMediaItemSettings(), clientContext);

    // Aggregate the context's background resources so close()/shutdown() below
    // can release them all in one place.
    this.backgroundResources =
        new BackgroundResourceAggregation(clientContext.getBackgroundResources());
  }

  public GrpcOperationsStub getOperationsStub() {
    return operationsStub;
  }

  // --- Callable accessors: trivial getters returning the fields wired above. ---

  @Override
  public UnaryCallable<CreateAlbumRequest, Album> createAlbumCallable() {
    return createAlbumCallable;
  }

  @Override
  public UnaryCallable<BatchCreateMediaItemsRequest, BatchCreateMediaItemsResponse>
      batchCreateMediaItemsCallable() {
    return batchCreateMediaItemsCallable;
  }

  @Override
  public UnaryCallable<BatchAddMediaItemsToAlbumRequest, BatchAddMediaItemsToAlbumResponse>
      batchAddMediaItemsToAlbumCallable() {
    return batchAddMediaItemsToAlbumCallable;
  }

  @Override
  public UnaryCallable<SearchMediaItemsRequest, SearchMediaItemsResponse>
      searchMediaItemsCallable() {
    return searchMediaItemsCallable;
  }

  @Override
  public UnaryCallable<SearchMediaItemsRequest, SearchMediaItemsPagedResponse>
      searchMediaItemsPagedCallable() {
    return searchMediaItemsPagedCallable;
  }

  @Override
  public UnaryCallable<ListMediaItemsRequest, ListMediaItemsResponse> listMediaItemsCallable() {
    return listMediaItemsCallable;
  }

  @Override
  public UnaryCallable<ListMediaItemsRequest, ListMediaItemsPagedResponse>
      listMediaItemsPagedCallable() {
    return listMediaItemsPagedCallable;
  }

  @Override
  public UnaryCallable<GetMediaItemRequest, MediaItem> getMediaItemCallable() {
    return getMediaItemCallable;
  }

  @Override
  public UnaryCallable<BatchGetMediaItemsRequest, BatchGetMediaItemsResponse>
      batchGetMediaItemsCallable() {
    return batchGetMediaItemsCallable;
  }

  @Override
  public UnaryCallable<ListAlbumsRequest, ListAlbumsResponse> listAlbumsCallable() {
    return listAlbumsCallable;
  }

  @Override
  public UnaryCallable<ListAlbumsRequest, ListAlbumsPagedResponse> listAlbumsPagedCallable() {
    return listAlbumsPagedCallable;
  }

  @Override
  public UnaryCallable<GetAlbumRequest, Album> getAlbumCallable() {
    return getAlbumCallable;
  }

  @Override
  public UnaryCallable<GetSharedAlbumRequest, Album> getSharedAlbumCallable() {
    return getSharedAlbumCallable;
  }

  @Override
  public UnaryCallable<AddEnrichmentToAlbumRequest, AddEnrichmentToAlbumResponse>
      addEnrichmentToAlbumCallable() {
    return addEnrichmentToAlbumCallable;
  }

  @Override
  public UnaryCallable<JoinSharedAlbumRequest, JoinSharedAlbumResponse> joinSharedAlbumCallable() {
    return joinSharedAlbumCallable;
  }

  @Override
  public UnaryCallable<LeaveSharedAlbumRequest, LeaveSharedAlbumResponse>
      leaveSharedAlbumCallable() {
    return leaveSharedAlbumCallable;
  }

  @Override
  public UnaryCallable<ShareAlbumRequest, ShareAlbumResponse> shareAlbumCallable() {
    return shareAlbumCallable;
  }

  @Override
  public UnaryCallable<ListSharedAlbumsRequest, ListSharedAlbumsResponse>
      listSharedAlbumsCallable() {
    return listSharedAlbumsCallable;
  }

  @Override
  public UnaryCallable<ListSharedAlbumsRequest, ListSharedAlbumsPagedResponse>
      listSharedAlbumsPagedCallable() {
    return listSharedAlbumsPagedCallable;
  }

  @Override
  public UnaryCallable<UnshareAlbumRequest, UnshareAlbumResponse> unshareAlbumCallable() {
    return unshareAlbumCallable;
  }

  @Override
  public UnaryCallable<
          BatchRemoveMediaItemsFromAlbumRequest, BatchRemoveMediaItemsFromAlbumResponse>
      batchRemoveMediaItemsFromAlbumCallable() {
    return batchRemoveMediaItemsFromAlbumCallable;
  }

  @Override
  public UnaryCallable<UpdateAlbumRequest, Album> updateAlbumCallable() {
    return updateAlbumCallable;
  }

  @Override
  public UnaryCallable<UpdateMediaItemRequest, MediaItem> updateMediaItemCallable() {
    return updateMediaItemCallable;
  }

  // --- Lifecycle: all teardown delegates to the aggregated background resources. ---

  @Override
  public final void close() {
    try {
      backgroundResources.close();
    } catch (RuntimeException e) {
      // Propagate unchecked failures as-is; wrap checked ones below.
      throw e;
    } catch (Exception e) {
      throw new IllegalStateException("Failed to close resource", e);
    }
  }

  @Override
  public void shutdown() {
    backgroundResources.shutdown();
  }

  @Override
  public boolean isShutdown() {
    return backgroundResources.isShutdown();
  }

  @Override
  public boolean isTerminated() {
    return backgroundResources.isTerminated();
  }

  @Override
  public void shutdownNow() {
    backgroundResources.shutdownNow();
  }

  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return backgroundResources.awaitTermination(duration, unit);
  }
}
googleapis/google-cloud-java
37,100
java-rapidmigrationassessment/proto-google-cloud-rapidmigrationassessment-v1/src/main/java/com/google/cloud/rapidmigrationassessment/v1/CreateAnnotationRequest.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/rapidmigrationassessment/v1/rapidmigrationassessment.proto

// Protobuf Java Version: 3.25.8
package com.google.cloud.rapidmigrationassessment.v1;

/**
 *
 *
 * <pre>
 * Message for creating an AnnotationS.
 * </pre>
 *
 * Protobuf type {@code google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest}
 */
// NOTE(review): generated code — do not hand-edit behavior; the "AnnotationS" typo above
// originates in the .proto source comment and should be fixed there, not here.
public final class CreateAnnotationRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest)
    CreateAnnotationRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use CreateAnnotationRequest.newBuilder() to construct.
  private CreateAnnotationRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default instance: string fields start as empty strings, message field as null (absent).
  private CreateAnnotationRequest() {
    parent_ = "";
    requestId_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new CreateAnnotationRequest();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessmentProto
        .internal_static_google_cloud_rapidmigrationassessment_v1_CreateAnnotationRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessmentProto
        .internal_static_google_cloud_rapidmigrationassessment_v1_CreateAnnotationRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest.class,
            com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest.Builder.class);
  }

  // Presence bits: bit 0x00000001 tracks whether the `annotation` message field is set.
  private int bitField0_;

  public static final int PARENT_FIELD_NUMBER = 1;

  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";

  /**
   *
   *
   * <pre>
   * Required. Name of the parent (project+location).
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode a ByteString received off the wire and cache the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. Name of the parent (project+location).
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int ANNOTATION_FIELD_NUMBER = 2;
  private com.google.cloud.rapidmigrationassessment.v1.Annotation annotation_;

  /**
   *
   *
   * <pre>
   * Required. The resource being created.
   * </pre>
   *
   * <code>
   * .google.cloud.rapidmigrationassessment.v1.Annotation annotation = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the annotation field is set.
   */
  @java.lang.Override
  public boolean hasAnnotation() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   *
   *
   * <pre>
   * Required. The resource being created.
   * </pre>
   *
   * <code>
   * .google.cloud.rapidmigrationassessment.v1.Annotation annotation = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The annotation.
   */
  @java.lang.Override
  public com.google.cloud.rapidmigrationassessment.v1.Annotation getAnnotation() {
    // Never returns null: falls back to the default instance when the field is unset.
    return annotation_ == null
        ? com.google.cloud.rapidmigrationassessment.v1.Annotation.getDefaultInstance()
        : annotation_;
  }

  /**
   *
   *
   * <pre>
   * Required. The resource being created.
   * </pre>
   *
   * <code>
   * .google.cloud.rapidmigrationassessment.v1.Annotation annotation = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.rapidmigrationassessment.v1.AnnotationOrBuilder
      getAnnotationOrBuilder() {
    return annotation_ == null
        ? com.google.cloud.rapidmigrationassessment.v1.Annotation.getDefaultInstance()
        : annotation_;
  }

  public static final int REQUEST_ID_FIELD_NUMBER = 4;

  @SuppressWarnings("serial")
  private volatile java.lang.Object requestId_ = "";

  /**
   *
   *
   * <pre>
   * Optional. An optional request ID to identify requests.
   * </pre>
   *
   * <code>string request_id = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The requestId.
   */
  @java.lang.Override
  public java.lang.String getRequestId() {
    java.lang.Object ref = requestId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      requestId_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Optional. An optional request ID to identify requests.
   * </pre>
   *
   * <code>string request_id = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for requestId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getRequestIdBytes() {
    java.lang.Object ref = requestId_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      requestId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // Memoized initialization check: -1 = not computed, 1 = initialized, 0 = not initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Fields are written in ascending field-number order; empty strings and unset
    // messages are omitted, per proto3 serialization rules.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(2, getAnnotation());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, requestId_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    // Size is memoized; -1 means "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getAnnotation());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, requestId_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest other =
        (com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest) obj;

    if (!getParent().equals(other.getParent())) return false;
    if (hasAnnotation() != other.hasAnnotation()) return false;
    if (hasAnnotation()) {
      if (!getAnnotation().equals(other.getAnnotation())) return false;
    }
    if (!getRequestId().equals(other.getRequestId())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    if (hasAnnotation()) {
      hash = (37 * hash) + ANNOTATION_FIELD_NUMBER;
      hash = (53 * hash) + getAnnotation().hashCode();
    }
    hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER;
    hash = (53 * hash) + getRequestId().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * Message for creating an AnnotationS.
   * </pre>
   *
   * Protobuf type {@code google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest)
      com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessmentProto
          .internal_static_google_cloud_rapidmigrationassessment_v1_CreateAnnotationRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessmentProto
          .internal_static_google_cloud_rapidmigrationassessment_v1_CreateAnnotationRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest.class,
              com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest.Builder.class);
    }

    // Construct using
    // com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      // Eagerly create nested builders when field builders are always used (reflection path).
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getAnnotationFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      annotation_ = null;
      if (annotationBuilder_ != null) {
        annotationBuilder_.dispose();
        annotationBuilder_ = null;
      }
      requestId_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.rapidmigrationassessment.v1.RapidMigrationAssessmentProto
          .internal_static_google_cloud_rapidmigrationassessment_v1_CreateAnnotationRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest
        getDefaultInstanceForType() {
      return com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest
          .getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest build() {
      com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest buildPartial() {
      com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest result =
          new com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies set fields from the builder into the result; builder bits
    // (1 = parent, 2 = annotation, 4 = requestId) map onto the message's
    // single presence bit for the annotation field.
    private void buildPartial0(
        com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.annotation_ = annotationBuilder_ == null ? annotation_ : annotationBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.requestId_ = requestId_;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest) {
        return mergeFrom(
            (com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(
        com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest other) {
      if (other
          == com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest
              .getDefaultInstance()) return this;
      // proto3 merge semantics: non-empty strings overwrite, set messages are merged.
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.hasAnnotation()) {
        mergeAnnotation(other.getAnnotation());
      }
      if (!other.getRequestId().isEmpty()) {
        requestId_ = other.requestId_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                // tag 10 = field 1 (parent), wire type 2 (length-delimited string)
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                // tag 18 = field 2 (annotation), wire type 2 (embedded message)
                input.readMessage(getAnnotationFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 34:
              {
                // tag 34 = field 4 (request_id), wire type 2 (length-delimited string)
                requestId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 34
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private java.lang.Object parent_ = "";

    /**
     *
     *
     * <pre>
     * Required. Name of the parent (project+location).
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Name of the parent (project+location).
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Name of the parent (project+location).
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Name of the parent (project+location).
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Name of the parent (project+location).
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private com.google.cloud.rapidmigrationassessment.v1.Annotation annotation_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.rapidmigrationassessment.v1.Annotation,
            com.google.cloud.rapidmigrationassessment.v1.Annotation.Builder,
            com.google.cloud.rapidmigrationassessment.v1.AnnotationOrBuilder>
        annotationBuilder_;

    /**
     *
     *
     * <pre>
     * Required. The resource being created.
     * </pre>
     *
     * <code>
     * .google.cloud.rapidmigrationassessment.v1.Annotation annotation = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the annotation field is set.
     */
    public boolean hasAnnotation() {
      return ((bitField0_ & 0x00000002) != 0);
    }

    /**
     *
     *
     * <pre>
     * Required. The resource being created.
     * </pre>
     *
     * <code>
     * .google.cloud.rapidmigrationassessment.v1.Annotation annotation = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The annotation.
     */
    public com.google.cloud.rapidmigrationassessment.v1.Annotation getAnnotation() {
      if (annotationBuilder_ == null) {
        return annotation_ == null
            ? com.google.cloud.rapidmigrationassessment.v1.Annotation.getDefaultInstance()
            : annotation_;
      } else {
        return annotationBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The resource being created.
     * </pre>
     *
     * <code>
     * .google.cloud.rapidmigrationassessment.v1.Annotation annotation = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setAnnotation(com.google.cloud.rapidmigrationassessment.v1.Annotation value) {
      if (annotationBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        annotation_ = value;
      } else {
        annotationBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The resource being created.
     * </pre>
     *
     * <code>
     * .google.cloud.rapidmigrationassessment.v1.Annotation annotation = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setAnnotation(
        com.google.cloud.rapidmigrationassessment.v1.Annotation.Builder builderForValue) {
      if (annotationBuilder_ == null) {
        annotation_ = builderForValue.build();
      } else {
        annotationBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The resource being created.
     * </pre>
     *
     * <code>
     * .google.cloud.rapidmigrationassessment.v1.Annotation annotation = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeAnnotation(com.google.cloud.rapidmigrationassessment.v1.Annotation value) {
      if (annotationBuilder_ == null) {
        // Merge field-by-field only when an annotation is already set and non-default;
        // otherwise replace wholesale.
        if (((bitField0_ & 0x00000002) != 0)
            && annotation_ != null
            && annotation_
                != com.google.cloud.rapidmigrationassessment.v1.Annotation.getDefaultInstance()) {
          getAnnotationBuilder().mergeFrom(value);
        } else {
          annotation_ = value;
        }
      } else {
        annotationBuilder_.mergeFrom(value);
      }
      if (annotation_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The resource being created.
     * </pre>
     *
     * <code>
     * .google.cloud.rapidmigrationassessment.v1.Annotation annotation = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearAnnotation() {
      bitField0_ = (bitField0_ & ~0x00000002);
      annotation_ = null;
      if (annotationBuilder_ != null) {
        annotationBuilder_.dispose();
        annotationBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The resource being created.
     * </pre>
     *
     * <code>
     * .google.cloud.rapidmigrationassessment.v1.Annotation annotation = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.rapidmigrationassessment.v1.Annotation.Builder getAnnotationBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getAnnotationFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * Required. The resource being created.
     * </pre>
     *
     * <code>
     * .google.cloud.rapidmigrationassessment.v1.Annotation annotation = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.rapidmigrationassessment.v1.AnnotationOrBuilder
        getAnnotationOrBuilder() {
      if (annotationBuilder_ != null) {
        return annotationBuilder_.getMessageOrBuilder();
      } else {
        return annotation_ == null
            ? com.google.cloud.rapidmigrationassessment.v1.Annotation.getDefaultInstance()
            : annotation_;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The resource being created.
     * </pre>
     *
     * <code>
     * .google.cloud.rapidmigrationassessment.v1.Annotation annotation = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.rapidmigrationassessment.v1.Annotation,
            com.google.cloud.rapidmigrationassessment.v1.Annotation.Builder,
            com.google.cloud.rapidmigrationassessment.v1.AnnotationOrBuilder>
        getAnnotationFieldBuilder() {
      if (annotationBuilder_ == null) {
        annotationBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.rapidmigrationassessment.v1.Annotation,
                com.google.cloud.rapidmigrationassessment.v1.Annotation.Builder,
                com.google.cloud.rapidmigrationassessment.v1.AnnotationOrBuilder>(
                getAnnotation(), getParentForChildren(), isClean());
        annotation_ = null;
      }
      return annotationBuilder_;
    }

    private java.lang.Object requestId_ = "";

    /**
     *
     *
     * <pre>
     * Optional. An optional request ID to identify requests.
     * </pre>
     *
     * <code>string request_id = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The requestId.
     */
    public java.lang.String getRequestId() {
      java.lang.Object ref = requestId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        requestId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. An optional request ID to identify requests.
     * </pre>
     *
     * <code>string request_id = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for requestId.
     */
    public com.google.protobuf.ByteString getRequestIdBytes() {
      java.lang.Object ref = requestId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        requestId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. An optional request ID to identify requests.
     * </pre>
     *
     * <code>string request_id = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The requestId to set.
     * @return This builder for chaining.
     */
    public Builder setRequestId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      requestId_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. An optional request ID to identify requests.
     * </pre>
     *
     * <code>string request_id = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearRequestId() {
      requestId_ = getDefaultInstance().getRequestId();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. An optional request ID to identify requests.
     * </pre>
     *
     * <code>string request_id = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for requestId to set.
     * @return This builder for chaining.
     */
    public Builder setRequestIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      requestId_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest)
  private static final com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest();
  }

  public static com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<CreateAnnotationRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateAnnotationRequest>() {
        @java.lang.Override
        public CreateAnnotationRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<CreateAnnotationRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<CreateAnnotationRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.rapidmigrationassessment.v1.CreateAnnotationRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
googleapis/google-cloud-java
37,122
java-assured-workloads/proto-google-cloud-assured-workloads-v1beta1/src/main/java/com/google/cloud/assuredworkloads/v1beta1/ListWorkloadsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/assuredworkloads/v1beta1/assuredworkloads.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.assuredworkloads.v1beta1; /** * * * <pre> * Response of ListWorkloads endpoint. * </pre> * * Protobuf type {@code google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse} */ public final class ListWorkloadsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse) ListWorkloadsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListWorkloadsResponse.newBuilder() to construct. 
private ListWorkloadsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListWorkloadsResponse() { workloads_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListWorkloadsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.assuredworkloads.v1beta1.AssuredworkloadsProto .internal_static_google_cloud_assuredworkloads_v1beta1_ListWorkloadsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.assuredworkloads.v1beta1.AssuredworkloadsProto .internal_static_google_cloud_assuredworkloads_v1beta1_ListWorkloadsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse.class, com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse.Builder.class); } public static final int WORKLOADS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.assuredworkloads.v1beta1.Workload> workloads_; /** * * * <pre> * List of Workloads under a given parent. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1beta1.Workload workloads = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.assuredworkloads.v1beta1.Workload> getWorkloadsList() { return workloads_; } /** * * * <pre> * List of Workloads under a given parent. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1beta1.Workload workloads = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.assuredworkloads.v1beta1.WorkloadOrBuilder> getWorkloadsOrBuilderList() { return workloads_; } /** * * * <pre> * List of Workloads under a given parent. 
* </pre> * * <code>repeated .google.cloud.assuredworkloads.v1beta1.Workload workloads = 1;</code> */ @java.lang.Override public int getWorkloadsCount() { return workloads_.size(); } /** * * * <pre> * List of Workloads under a given parent. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1beta1.Workload workloads = 1;</code> */ @java.lang.Override public com.google.cloud.assuredworkloads.v1beta1.Workload getWorkloads(int index) { return workloads_.get(index); } /** * * * <pre> * List of Workloads under a given parent. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1beta1.Workload workloads = 1;</code> */ @java.lang.Override public com.google.cloud.assuredworkloads.v1beta1.WorkloadOrBuilder getWorkloadsOrBuilder( int index) { return workloads_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * The next page token. Return empty if reached the last page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * The next page token. Return empty if reached the last page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < workloads_.size(); i++) { output.writeMessage(1, workloads_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < workloads_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, workloads_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse)) { return super.equals(obj); } com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse other = (com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse) obj; if (!getWorkloadsList().equals(other.getWorkloadsList())) return false; if 
(!getNextPageToken().equals(other.getNextPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getWorkloadsCount() > 0) { hash = (37 * hash) + WORKLOADS_FIELD_NUMBER; hash = (53 * hash) + getWorkloadsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static 
Builder newBuilder( com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response of ListWorkloads endpoint. * </pre> * * Protobuf type {@code google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse) com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.assuredworkloads.v1beta1.AssuredworkloadsProto .internal_static_google_cloud_assuredworkloads_v1beta1_ListWorkloadsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.assuredworkloads.v1beta1.AssuredworkloadsProto .internal_static_google_cloud_assuredworkloads_v1beta1_ListWorkloadsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse.class, com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse.Builder.class); } // Construct using com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (workloadsBuilder_ == null) { workloads_ = java.util.Collections.emptyList(); } 
else { workloads_ = null; workloadsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.assuredworkloads.v1beta1.AssuredworkloadsProto .internal_static_google_cloud_assuredworkloads_v1beta1_ListWorkloadsResponse_descriptor; } @java.lang.Override public com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse getDefaultInstanceForType() { return com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse build() { com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse buildPartial() { com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse result = new com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse result) { if (workloadsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { workloads_ = java.util.Collections.unmodifiableList(workloads_); bitField0_ = (bitField0_ & ~0x00000001); } result.workloads_ = workloads_; } else { result.workloads_ = workloadsBuilder_.build(); } } private void buildPartial0( com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( 
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse) { return mergeFrom((com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse other) { if (other == com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse.getDefaultInstance()) return this; if (workloadsBuilder_ == null) { if (!other.workloads_.isEmpty()) { if (workloads_.isEmpty()) { workloads_ = other.workloads_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureWorkloadsIsMutable(); workloads_.addAll(other.workloads_); } onChanged(); } } else { if (!other.workloads_.isEmpty()) { if (workloadsBuilder_.isEmpty()) { workloadsBuilder_.dispose(); workloadsBuilder_ = null; workloads_ = other.workloads_; bitField0_ = (bitField0_ & ~0x00000001); workloadsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getWorkloadsFieldBuilder() : null; } else { workloadsBuilder_.addAllMessages(other.workloads_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.assuredworkloads.v1beta1.Workload m = input.readMessage( com.google.cloud.assuredworkloads.v1beta1.Workload.parser(), extensionRegistry); if (workloadsBuilder_ == null) { ensureWorkloadsIsMutable(); workloads_.add(m); } else { workloadsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.assuredworkloads.v1beta1.Workload> workloads_ = java.util.Collections.emptyList(); private void ensureWorkloadsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { workloads_ = new java.util.ArrayList<com.google.cloud.assuredworkloads.v1beta1.Workload>(workloads_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.assuredworkloads.v1beta1.Workload, 
com.google.cloud.assuredworkloads.v1beta1.Workload.Builder, com.google.cloud.assuredworkloads.v1beta1.WorkloadOrBuilder> workloadsBuilder_; /** * * * <pre> * List of Workloads under a given parent. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1beta1.Workload workloads = 1;</code> */ public java.util.List<com.google.cloud.assuredworkloads.v1beta1.Workload> getWorkloadsList() { if (workloadsBuilder_ == null) { return java.util.Collections.unmodifiableList(workloads_); } else { return workloadsBuilder_.getMessageList(); } } /** * * * <pre> * List of Workloads under a given parent. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1beta1.Workload workloads = 1;</code> */ public int getWorkloadsCount() { if (workloadsBuilder_ == null) { return workloads_.size(); } else { return workloadsBuilder_.getCount(); } } /** * * * <pre> * List of Workloads under a given parent. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1beta1.Workload workloads = 1;</code> */ public com.google.cloud.assuredworkloads.v1beta1.Workload getWorkloads(int index) { if (workloadsBuilder_ == null) { return workloads_.get(index); } else { return workloadsBuilder_.getMessage(index); } } /** * * * <pre> * List of Workloads under a given parent. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1beta1.Workload workloads = 1;</code> */ public Builder setWorkloads( int index, com.google.cloud.assuredworkloads.v1beta1.Workload value) { if (workloadsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureWorkloadsIsMutable(); workloads_.set(index, value); onChanged(); } else { workloadsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * List of Workloads under a given parent. 
* </pre> * * <code>repeated .google.cloud.assuredworkloads.v1beta1.Workload workloads = 1;</code> */ public Builder setWorkloads( int index, com.google.cloud.assuredworkloads.v1beta1.Workload.Builder builderForValue) { if (workloadsBuilder_ == null) { ensureWorkloadsIsMutable(); workloads_.set(index, builderForValue.build()); onChanged(); } else { workloadsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * List of Workloads under a given parent. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1beta1.Workload workloads = 1;</code> */ public Builder addWorkloads(com.google.cloud.assuredworkloads.v1beta1.Workload value) { if (workloadsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureWorkloadsIsMutable(); workloads_.add(value); onChanged(); } else { workloadsBuilder_.addMessage(value); } return this; } /** * * * <pre> * List of Workloads under a given parent. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1beta1.Workload workloads = 1;</code> */ public Builder addWorkloads( int index, com.google.cloud.assuredworkloads.v1beta1.Workload value) { if (workloadsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureWorkloadsIsMutable(); workloads_.add(index, value); onChanged(); } else { workloadsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * List of Workloads under a given parent. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1beta1.Workload workloads = 1;</code> */ public Builder addWorkloads( com.google.cloud.assuredworkloads.v1beta1.Workload.Builder builderForValue) { if (workloadsBuilder_ == null) { ensureWorkloadsIsMutable(); workloads_.add(builderForValue.build()); onChanged(); } else { workloadsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * List of Workloads under a given parent. 
* </pre> * * <code>repeated .google.cloud.assuredworkloads.v1beta1.Workload workloads = 1;</code> */ public Builder addWorkloads( int index, com.google.cloud.assuredworkloads.v1beta1.Workload.Builder builderForValue) { if (workloadsBuilder_ == null) { ensureWorkloadsIsMutable(); workloads_.add(index, builderForValue.build()); onChanged(); } else { workloadsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * List of Workloads under a given parent. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1beta1.Workload workloads = 1;</code> */ public Builder addAllWorkloads( java.lang.Iterable<? extends com.google.cloud.assuredworkloads.v1beta1.Workload> values) { if (workloadsBuilder_ == null) { ensureWorkloadsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, workloads_); onChanged(); } else { workloadsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * List of Workloads under a given parent. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1beta1.Workload workloads = 1;</code> */ public Builder clearWorkloads() { if (workloadsBuilder_ == null) { workloads_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { workloadsBuilder_.clear(); } return this; } /** * * * <pre> * List of Workloads under a given parent. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1beta1.Workload workloads = 1;</code> */ public Builder removeWorkloads(int index) { if (workloadsBuilder_ == null) { ensureWorkloadsIsMutable(); workloads_.remove(index); onChanged(); } else { workloadsBuilder_.remove(index); } return this; } /** * * * <pre> * List of Workloads under a given parent. 
* </pre> * * <code>repeated .google.cloud.assuredworkloads.v1beta1.Workload workloads = 1;</code> */ public com.google.cloud.assuredworkloads.v1beta1.Workload.Builder getWorkloadsBuilder( int index) { return getWorkloadsFieldBuilder().getBuilder(index); } /** * * * <pre> * List of Workloads under a given parent. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1beta1.Workload workloads = 1;</code> */ public com.google.cloud.assuredworkloads.v1beta1.WorkloadOrBuilder getWorkloadsOrBuilder( int index) { if (workloadsBuilder_ == null) { return workloads_.get(index); } else { return workloadsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * List of Workloads under a given parent. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1beta1.Workload workloads = 1;</code> */ public java.util.List<? extends com.google.cloud.assuredworkloads.v1beta1.WorkloadOrBuilder> getWorkloadsOrBuilderList() { if (workloadsBuilder_ != null) { return workloadsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(workloads_); } } /** * * * <pre> * List of Workloads under a given parent. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1beta1.Workload workloads = 1;</code> */ public com.google.cloud.assuredworkloads.v1beta1.Workload.Builder addWorkloadsBuilder() { return getWorkloadsFieldBuilder() .addBuilder(com.google.cloud.assuredworkloads.v1beta1.Workload.getDefaultInstance()); } /** * * * <pre> * List of Workloads under a given parent. * </pre> * * <code>repeated .google.cloud.assuredworkloads.v1beta1.Workload workloads = 1;</code> */ public com.google.cloud.assuredworkloads.v1beta1.Workload.Builder addWorkloadsBuilder( int index) { return getWorkloadsFieldBuilder() .addBuilder( index, com.google.cloud.assuredworkloads.v1beta1.Workload.getDefaultInstance()); } /** * * * <pre> * List of Workloads under a given parent. 
* </pre> * * <code>repeated .google.cloud.assuredworkloads.v1beta1.Workload workloads = 1;</code> */ public java.util.List<com.google.cloud.assuredworkloads.v1beta1.Workload.Builder> getWorkloadsBuilderList() { return getWorkloadsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.assuredworkloads.v1beta1.Workload, com.google.cloud.assuredworkloads.v1beta1.Workload.Builder, com.google.cloud.assuredworkloads.v1beta1.WorkloadOrBuilder> getWorkloadsFieldBuilder() { if (workloadsBuilder_ == null) { workloadsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.assuredworkloads.v1beta1.Workload, com.google.cloud.assuredworkloads.v1beta1.Workload.Builder, com.google.cloud.assuredworkloads.v1beta1.WorkloadOrBuilder>( workloads_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); workloads_ = null; } return workloadsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * The next page token. Return empty if reached the last page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The next page token. Return empty if reached the last page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The next page token. 
Return empty if reached the last page. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The next page token. Return empty if reached the last page. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * The next page token. Return empty if reached the last page. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse) private static final com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse(); } public static com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse 
getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListWorkloadsResponse> PARSER = new com.google.protobuf.AbstractParser<ListWorkloadsResponse>() { @java.lang.Override public ListWorkloadsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListWorkloadsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListWorkloadsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.assuredworkloads.v1beta1.ListWorkloadsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
openjdk/jdk8
37,185
hotspot/test/compiler/6711117/Test.java
/* * Copyright (c) 2009, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. * */ /* * @test * @bug 6711117 * @summary Assertion in 64bit server vm (flat != TypePtr::BOTTOM,"cannot alias-analyze an untyped ptr") * @run main/othervm -Xcomp -XX:+IgnoreUnrecognizedVMOptions -XX:+AggressiveOpts -XX:+UseCompressedOops Test */ final class Test_Class_0 { final static char var_1 = 'E'; short var_2 = 16213; final static String var_3 = "jiiibmmsk"; public Test_Class_0() { var_2 ^= 'M'; final String var_18 = var_3; var_2--; var_2 |= (byte)('D' / (byte)var_2) - ((byte)1.6680514E38F << + ((byte)'O') & 7320241275829036032L); func_2(((!false & false | false ? true : false) ? true : true | !true) ? var_2 : 834513107); var_2 >>>= var_1; "smiosoebk".codePointCount(true ^ (false ^ ! !false) ? (byte)- ((byte)430513598) : + ((byte)'_'), ~ (true ? (byte)']' : (byte)-2.8272547997066827E307)); var_2 -= true ? 
var_1 : var_1; var_2 ^= var_1; var_2 &= (var_2 |= ~ ((byte)(var_2 *= var_2))); long var_19 = 0L; short var_20 = var_2 += 'P'; while (var_19 < 1) { var_2 ^= true ? (byte)- +1.2219539475209E308 : (byte)1.2748408476894178E308; var_19++; var_2 = (byte)((1489358000 == (var_20 | 7816908224315289600L) ? var_1 : var_1) ^ var_19); var_20--; } var_20 -= 'f'; var_20 <<= (((new Test_Class_0[(byte)var_20])[(byte)var_2]).var_2 *= false ? 'g' : 'x'); } static float func_0() { ((new Test_Class_0[(byte)7.774490796987995E307])[(byte)'v']).var_2 <<= false ^ !false ? (short)'v' : "".codePointCount(594464985, 579036736); ((new Test_Class_0[(byte)(((new Test_Class_0[(byte)1361657519])[(byte)2.3703713E38F]).var_2-- - (short)3.5589388134844986E307)])[((true ? !true : false) ^ (!false ? true : !true) ? !false : false) ? (byte)7.047289E37F : (byte)- ((byte)2.6620062118475144E307)]).var_2 *= 3273943364390983680L; --((new Test_Class_0[false ? (byte)(short)1.4965069E36F : (byte)286322022])[(byte)- ((byte)2.742619E38F)]).var_2; long var_4; { double var_5; } var_4 = (byte)1.3509231E38F; ((new Test_Class_0[(byte)'_'])[('g' | 1427123046096105472L) < var_1 >> (byte)(int)(byte)7697616672011068416L ? (byte)var_1 : (byte)1251856579]).var_2--; switch (--((new Test_Class_0[(byte)5.0656327E37F])[(byte)'e']).var_2 != ++((new Test_Class_0[(byte)(int)1.3728667270920175E308])[(byte)+ + -1.6338179407381788E308]).var_2 | !var_3.equalsIgnoreCase("iiwwwln") ? (false ? (byte)1.8291216E38F : (byte)4.778575546584698E307) : (byte)1048254181) { case 99: } { byte var_6 = 13; } var_4 = --((new Test_Class_0[!var_3.endsWith("qaoioore") ^ false ? (byte)2.827362738392923E307 : (byte)~4890175967151316992L])[(byte)(short)var_1]).var_2; ++((new Test_Class_0[(byte)(1.0075552E38F + (short)2083553541)])[(byte)(short)(byte)(short)1.6872205E38F]).var_2; return ((new Test_Class_0[(byte)var_1])[(byte)+ +5760973323384750080L]).var_2 - (false ? 
(byte)'i' : (var_4 = (short)1.2458781351126844E308) + 2.131006E38F); } public static long func_1(String arg_0, Object arg_1, final long arg_2) { arg_0 = false ? arg_0 : "fgbrpgsq"; ((new Test_Class_0[(byte)- ((byte)']')])[false ? (byte)757239006 : (byte)1866002020]).var_2 ^= (short)(true ? (byte)(((new Test_Class_0[(byte)1416194866])[(byte)1.2309887362692395E308]).var_2 >>= (int)~ ~ ~arg_2) : (byte)5804970709284726784L); final long var_7 = (long)(- + ((long)+ - + - -2.5396583E38F) - - +1.8770165E38F % 2472404173160781824L < --((new Test_Class_0[(byte)5.569360482341752E307])[(byte)(double)(byte)8131142397821553664L]).var_2 ^ true ? (false ? (byte)- -1.163275451591927E308 : (byte)var_1) : (false ? (byte)1843746036 : (byte)1.0209668642291047E308)); arg_0 = (arg_0 = arg_0.substring(699480935)); switch (((new Test_Class_0[(byte)(5415649243316856832L >> 861936806)])[true | true & !false ? (byte)(short)- -7.785169683394908E307 : (byte)+ ((byte)arg_2)]).var_2++) { case 42: case 102: } arg_1 = (true || false ? false : true) ? (arg_0 = (arg_0 = "jbfaru")) : arg_0; arg_1 = new byte[(byte)2.669957E38F]; boolean var_8 = ! ((false ? (short)1.4259420861834744E308 : (short)7.352115508157158E307) != 1.7635658130722812E308); arg_1 = new Object[(byte)- ((byte)(short)1.8950693E38F)]; arg_0 = arg_0; return (byte)1.4762239057269886E308 & 4923938844759802880L; } double[][] func_2(final int arg_0) { var_2 >>>= (var_2 >>= var_2++); float var_9 = 0F; var_2 %= var_2; do { ++var_2; var_9++; var_2++; } while (true && (var_9 < 1 && false)); double var_10 = 0; final int var_11 = 11903395; do { --var_2; var_10++; ++var_2; } while ((false & true || false) && (var_10 < 2 && ~ ((byte)'[') == (byte)(1.1943192E38F % ('c' << var_1) % (byte)((var_2 |= var_2) + 591679039 / ~5932100696448264192L)))); String var_12 = "jkwnk"; var_12 = var_3; var_12 = (var_12 = (var_12 = var_3)); var_12 = "qrhdwx"; var_12 = var_12; short var_13 = (true && true) ^ true | ! (!true || 1646418779 <= (byte)var_1) ? 
var_2 : var_2; return new double[(byte)var_1][true || false ^ !true ^ true ? (byte)arg_0 : (byte)var_10]; } private final int func_3() { long var_14 = 's' * (~ ~6656240461354863616L * 3151744928387344384L) << ~ (((var_2 >>>= 6600935261424147456L) % 1798503219359364096L | - ~3832249967647077376L / - ((byte)~1529201870915276800L)) / var_2); { var_14 |= !false | (byte)1078230528 >= (byte)1.3972878565417081E308 | (true | !true & !true & !false) ? var_1 : '_'; } long var_15 = 7589204885152164864L; var_2 ^= (var_1 < (byte)'r' ? 475314139 : 'Z') <= 1943074698 ? 'h' : var_1; return 'V' * (false ? (byte)5.498204E37F : (byte)1.0137001669765466E308); } protected static boolean func_4(boolean arg_0, byte arg_1, boolean arg_2) { arg_1++; arg_1 &= (((((new Test_Class_0[arg_1][arg_1][arg_1])[arg_1])[arg_1])[arg_1]).var_2 |= arg_2 ? (short)~3038084056596854784L : (short)+ (arg_1 = arg_1)); arg_0 |= true; arg_1 %= (arg_1 |= ((new Test_Class_0[arg_1])[arg_1]).var_2--); if (false) { arg_0 |= arg_2; } else { ++(((new Test_Class_0[arg_1][arg_1][arg_1])[arg_1 += var_1])[(!arg_2 | (arg_0 &= false)) ^ (arg_0 | arg_0) ? 
arg_1 : (arg_1 <<= 3192041751921364992L)][arg_1 /= arg_1]).var_2; } arg_1 &= +(new byte[arg_1])[arg_1]; arg_1 <<= 3632133838014908416L; byte[] var_16 = (new byte[arg_1][arg_1--])[arg_1]; long var_17; arg_1 ^= ~ arg_1--; arg_0 ^= (arg_2 ^= 1186877294 >= ((new Test_Class_0[arg_1][arg_1])[arg_1][arg_1]).var_2) & arg_2; return var_3.startsWith(var_3); } public String toString() { String result = "[\n"; result += "Test_Class_0.var_2 = "; result += Test.Printer.print(var_2); result += "\n"; result += "Test_Class_0.var_1 = "; result += Test.Printer.print(var_1); result += "\n"; result += "Test_Class_0.var_3 = "; result += Test.Printer.print(var_3); result += ""; result += "\n]"; return result; } } class Test_Class_1 { static int var_21 = 670918363; final float var_22 = 8.650798E37F; static int var_23 = 1774228457; final int var_24 = 1282736974; final byte var_25 = !false & false | true ? (byte)7.677121016144275E307 : (byte)'r'; static long var_26 = 2939310115459338240L; final long var_27 = var_25 - 7555453173456381952L; double var_28; static String var_29; public Test_Class_1() { var_29 = Test_Class_0.var_3; ((false ? false || ! !true : ! (! !true & !true)) ? 
new Test_Class_0() : new Test_Class_0()).var_2++; var_23 -= 2.963694E38F; } public String toString() { String result = "[\n"; result += "Test_Class_1.var_21 = "; result += Test.Printer.print(var_21); result += "\n"; result += "Test_Class_1.var_23 = "; result += Test.Printer.print(var_23); result += "\n"; result += "Test_Class_1.var_24 = "; result += Test.Printer.print(var_24); result += "\n"; result += "Test_Class_1.var_26 = "; result += Test.Printer.print(var_26); result += "\n"; result += "Test_Class_1.var_27 = "; result += Test.Printer.print(var_27); result += "\n"; result += "Test_Class_1.var_28 = "; result += Test.Printer.print(var_28); result += "\n"; result += "Test_Class_1.var_22 = "; result += Test.Printer.print(var_22); result += "\n"; result += "Test_Class_1.var_25 = "; result += Test.Printer.print(var_25); result += "\n"; result += "Test_Class_1.var_29 = "; result += Test.Printer.print(var_29); result += ""; result += "\n]"; return result; } } class Test_Class_2 { double var_30; static byte var_31; static char var_32; float var_33; double var_34 = !false & (true ? true : ! !true && false) ? 'q' - 4789231433793305600L - (var_33 = -1.0677024E38F) : 2.65473560313378E307; final double var_35 = ~Test_Class_1.var_26 == 5.145660681364723E307 | false ? 1.4134775E38F : 1.77223030708671E308; final int var_36 = Test_Class_1.var_23 |= Test_Class_1.var_21++; public Test_Class_2() { Test_Class_0.var_3.replace(Test_Class_0.var_1, 'Q'); var_32 = (var_32 = (var_32 = '_')); Test_Class_1.var_26 |= Test_Class_0.var_1; Test_Class_1.var_29 = (Test_Class_1.var_29 = Test_Class_0.var_3); var_32 = Test_Class_0.var_1; var_33 = ((new Test_Class_0[(byte)851412948463452160L])[var_31 = new Test_Class_1().var_25]).var_2; var_33 = ! (((!false | false) & (false || !true) ? false : ! !false) | false) ? 
new Test_Class_1().var_25 : (var_31 = new Test_Class_1().var_25); float var_38 = 0F; var_34 /= 5336005797857974272L; for ("ccnyq".endsWith((new String[(byte)Test_Class_1.var_26])[var_31 = (var_31 = (var_31 = (byte)4.7927775E37F))]); var_38 < 2; var_32 = '^' <= Test_Class_0.var_1 ^ true ? (var_32 = Test_Class_0.var_1) : (var_32 = 'V')) { var_32 = true ? 'a' : (var_32 = Test_Class_0.var_1); var_38++; var_33 = new Test_Class_1().var_24; var_32 = ! (true || true ? !false : (short)3.2844383E37F < 2.1400662E38F) ? (char)1.2691096999143248E308 : (! !false ^ true ? 's' : 'q'); } var_32 = 'B'; { var_32 = Test_Class_0.var_1; } var_32 = Test_Class_0.var_1; Test_Class_1.var_29 = "ov"; Test_Class_1.var_29 = "smtolghw"; } protected final static String func_0(final long[][] arg_0, byte arg_1, char arg_2) { arg_1 <<= (((new Test_Class_2[arg_1])[arg_1]).var_34 > new Test_Class_0().var_2 | true ? new Test_Class_0() : (new Test_Class_0[arg_1][arg_1])[new Test_Class_1().var_25][new Test_Class_1().var_25]).var_2; Test_Class_1.var_26 >>>= (!true | !true | (new boolean[arg_1])[arg_1] || true ? (new Test_Class_1[arg_1])[arg_1] : new Test_Class_1()).var_27; float var_37 = 0F; arg_2 >>= ((new Test_Class_1[arg_1][arg_1])[arg_1][arg_1]).var_25; do { ((new Test_Class_2[arg_1 /= 2055714081])[arg_1]).var_34 = 'l'; var_37++; Test_Class_1.var_29 = Test_Class_0.var_3; } while ((false ? 
false : false) && var_37 < 7); Test_Class_1.var_29 = Test_Class_0.var_3 + ""; ((new Test_Class_2[new Test_Class_1().var_25][new Test_Class_1().var_25])[new Test_Class_1().var_25][arg_1 |= new Test_Class_0().var_2]).var_34 += Test_Class_0.var_1; return "esb"; } public String toString() { String result = "[\n"; result += "Test_Class_2.var_32 = "; result += Test.Printer.print(var_32); result += "\n"; result += "Test_Class_2.var_36 = "; result += Test.Printer.print(var_36); result += "\n"; result += "Test_Class_2.var_30 = "; result += Test.Printer.print(var_30); result += "\n"; result += "Test_Class_2.var_34 = "; result += Test.Printer.print(var_34); result += "\n"; result += "Test_Class_2.var_35 = "; result += Test.Printer.print(var_35); result += "\n"; result += "Test_Class_2.var_33 = "; result += Test.Printer.print(var_33); result += "\n"; result += "Test_Class_2.var_31 = "; result += Test.Printer.print(var_31); result += ""; result += "\n]"; return result; } } final class Test_Class_3 extends Test_Class_2 { byte var_39 = 23; static boolean var_40 = false; public Test_Class_3() { if (true) { Test_Class_1.var_21 |= new Test_Class_1().var_27; } else { final float var_46 = 7.9266674E37F; ++Test_Class_1.var_26; } { Test_Class_1.var_23++; } var_30 = ((new Test_Class_1[var_39][var_39])[var_39][var_39]).var_25; if (var_40 &= (var_40 |= (var_40 |= var_40))) { Test_Class_0.var_3.indexOf(Test_Class_1.var_29 = "xfgyblg", 'X' >>> ((Test_Class_1)(new Object[var_39])[((new Test_Class_1[var_39])[var_39]).var_25]).var_27); } else { var_40 &= var_40 && var_40; } ((Test_Class_2)(((new boolean[var_39])[var_39++] ? (var_40 &= var_40) : (var_40 &= false)) ? (new Test_Class_2[var_39][var_39])[var_39][var_39] : (new Object[var_39][var_39])[var_39][var_39])).var_33 = (var_40 ? new Test_Class_1() : new Test_Class_1()).var_25; switch (var_39) { case 24: } var_39 += (((var_40 ^= true) ? new Test_Class_0() : new Test_Class_0()).var_2 ^= var_40 & (var_40 | false) ? 
var_39-- : var_36); new Test_Class_0().var_2 %= (new Test_Class_0().var_2 += (var_39 ^= Test_Class_1.var_26)); } private static String func_0() { --Test_Class_1.var_26; { Test_Class_1.var_29 = var_40 ? Test_Class_0.var_3 : "rahqjhqf"; } if (var_40 ^= var_40) { Test_Class_1.var_26 >>= (Test_Class_2.var_32 = Test_Class_0.var_1) / new Test_Class_0().var_2; } else { ++Test_Class_1.var_21; } ++Test_Class_1.var_26; int var_41 = 0; ++Test_Class_1.var_26; do { var_40 = (var_40 = true); var_41++; Test_Class_0 var_42 = new Test_Class_0(); } while (var_41 < 1); Test_Class_1.var_29 = "f"; Test_Class_1 var_43; var_43 = (var_43 = new Test_Class_1()); Test_Class_2.var_32 = 'V'; long var_44 = 0L; Test_Class_1.var_23--; while (var_40 && (var_44 < 1 && var_40)) { Test_Class_1.var_29 = "bsgewkmk"; var_44++; Test_Class_1.var_29 = "ktegattny"; var_40 &= var_40 ^ (var_40 |= (short)4.4487427E37F < 'n') & true; } Test_Class_1.var_23 %= (((var_40 |= true & (var_40 &= var_40)) ^ true ? new Test_Class_0() : new Test_Class_0()).var_2 -= 1.6638270827800162E308); float var_45; var_32 = (Test_Class_2.var_32 = Test_Class_0.var_1); return false ? 
"fluk" : "wt"; } public String toString() { String result = "[\n"; result += "Test_Class_3.var_32 = "; result += Test.Printer.print(var_32); result += "\n"; result += "Test_Class_3.var_36 = "; result += Test.Printer.print(var_36); result += "\n"; result += "Test_Class_3.var_30 = "; result += Test.Printer.print(var_30); result += "\n"; result += "Test_Class_3.var_34 = "; result += Test.Printer.print(var_34); result += "\n"; result += "Test_Class_3.var_35 = "; result += Test.Printer.print(var_35); result += "\n"; result += "Test_Class_3.var_33 = "; result += Test.Printer.print(var_33); result += "\n"; result += "Test_Class_3.var_31 = "; result += Test.Printer.print(var_31); result += "\n"; result += "Test_Class_3.var_39 = "; result += Test.Printer.print(var_39); result += "\n"; result += "Test_Class_3.var_40 = "; result += Test.Printer.print(var_40); result += ""; result += "\n]"; return result; } } class Test_Class_4 { final float var_47 = 1.9043434E38F; final byte var_48 = 32; final float var_49 = 2.8176504E38F; final char var_50 = 'r'; final String var_51 = "uwgmnjpg"; static int var_52; short[] var_53; Test_Class_1 var_54; public Test_Class_4() { final float var_55 = (3.1554042E38F == var_50 ^ (Test_Class_3.var_40 |= true) ? (Test_Class_3.var_40 ^= Test_Class_3.var_40) ^ true : Test_Class_3.var_40) ? new Test_Class_0().var_2 : 2.965321E38F; new Test_Class_0().var_2 = (new Test_Class_0().var_2 >>= +new Test_Class_1().var_25); ((Test_Class_1.var_29 = (Test_Class_1.var_29 = (Test_Class_1.var_29 = "l"))) + "").equalsIgnoreCase(Test_Class_1.var_29 = "garnio"); double var_56 = 0; Test_Class_1.var_29 = var_51; while (var_56 < 1) { ((Test_Class_3)(Test_Class_2)(new Object[var_48])[var_48]).var_33 = ++Test_Class_1.var_26; var_56++; Test_Class_1.var_29 = (Test_Class_1.var_29 = "fvyjrih"); float[] var_57; } { ((new Test_Class_2[var_48])[((new Test_Class_3[var_48][var_48])[var_48][var_48]).var_39]).var_34 *= 2.2119221943262553E307; Test_Class_2.var_32 = true ? 
'q' : 't'; ((new Test_Class_3[--((Test_Class_3)new Test_Class_2()).var_39])[var_48]).var_33 = new Test_Class_0().var_2; int var_58 = 'i' >> (var_48 << Test_Class_0.var_1); } Test_Class_3.var_40 &= true && var_51.equalsIgnoreCase(var_51) || new Test_Class_0().var_2 < --((new Test_Class_3[var_48])[var_48]).var_39; ((Test_Class_3)(Test_Class_2)(new Object[var_48][var_48])[var_48][var_48]).var_34 += Test_Class_1.var_26--; var_54 = new Test_Class_1(); Test_Class_3.var_40 |= (long)(!true ^ var_47 > ((Test_Class_2)(new Object[var_48])[var_48]).var_34 ? (Test_Class_2.var_31 = (Test_Class_3.var_31 = (Test_Class_3.var_31 = var_48))) : (var_54 = new Test_Class_1()).var_25) <= var_48; (Test_Class_3.var_40 ? (true ? new Test_Class_0() : new Test_Class_0()) : new Test_Class_0()).var_2 &= var_48; (Test_Class_3.var_40 ? (Test_Class_3)new Test_Class_2() : (new Test_Class_3[var_48][var_48])[var_48][var_48]).var_34 += Test_Class_1.var_21; Test_Class_3 var_59; Test_Class_2.var_32 = 'H'; --Test_Class_1.var_26; } public String toString() { String result = "[\n"; result += "Test_Class_4.var_50 = "; result += Test.Printer.print(var_50); result += "\n"; result += "Test_Class_4.var_52 = "; result += Test.Printer.print(var_52); result += "\n"; result += "Test_Class_4.var_53 = "; result += Test.Printer.print(var_53); result += "\n"; result += "Test_Class_4.var_47 = "; result += Test.Printer.print(var_47); result += "\n"; result += "Test_Class_4.var_49 = "; result += Test.Printer.print(var_49); result += "\n"; result += "Test_Class_4.var_48 = "; result += Test.Printer.print(var_48); result += "\n"; result += "Test_Class_4.var_51 = "; result += Test.Printer.print(var_51); result += "\n"; result += "Test_Class_4.var_54 = "; result += Test.Printer.print(var_54); result += ""; result += "\n]"; return result; } } class Test_Class_5 extends Test_Class_4 { char var_60 = '_'; final byte var_61 = 101; public Test_Class_5() { Test_Class_0.var_3.indexOf(Test_Class_1.var_21, (Test_Class_3.var_40 |= 
Test_Class_3.var_40) ? new Test_Class_1().var_24 : 'i'); } final char func_0(Test_Class_1 arg_0, final Test_Class_1 arg_1) { long var_62 = 0L; "aoal".toLowerCase(); for (byte var_63 = arg_0.var_25; var_62 < 1 && "ji".startsWith("dikrs".endsWith("va") ? (Test_Class_1.var_29 = "mvp") : Test_Class_0.var_3, Test_Class_1.var_23); ((Test_Class_2)(new Object[arg_0.var_25])[var_63]).var_34 -= new Test_Class_2().var_36) { ((Test_Class_3.var_40 ? false : Test_Class_3.var_40) ? (Test_Class_0)(new Object[arg_1.var_25][arg_1.var_25])[arg_1.var_25][var_63] : (Test_Class_0)(new Object[var_48][var_48])[var_63][var_63]).var_2 += true ^ Test_Class_3.var_40 ^ (((new Test_Class_3[var_63][var_63])[var_63][var_61]).var_35 != 2.1423512E38F | ! !false) ? var_49 + ~var_48 : 3.1549515E38F; var_62++; (!false & ((Test_Class_3.var_40 |= (Test_Class_3.var_40 ^= true)) & true) ? (Test_Class_2)(new Object[var_63])[var_63] : (new Test_Class_2[var_63][var_61])[var_63][arg_0.var_25]).var_33 = (var_60 *= (var_60 *= ((new Test_Class_3[var_48][var_61])[var_61][var_63]).var_35)); float var_64; } Test_Class_1.var_29 = "xyenjknu"; Test_Class_3.var_40 ^= (Test_Class_3.var_40 = !false & true) ? Test_Class_3.var_40 : Test_Class_3.var_40; ((new Test_Class_2[var_48][arg_1.var_25])[arg_0.var_25][var_48]).var_33 = var_61; Test_Class_1.var_21 |= --(((new Test_Class_3[Test_Class_3.var_31 = arg_0.var_25][var_61])[var_61])[(((new Test_Class_3[var_48][var_61])[var_48])[((Test_Class_3)(new Test_Class_2[var_48][arg_0.var_25])[var_61][var_48]).var_39]).var_39 >>>= var_60]).var_39; var_51.compareToIgnoreCase("hgcaybk"); Test_Class_0 var_65 = (Test_Class_1.var_29 = "t").codePointBefore(1602805584) >= (float)((new Test_Class_3[var_48][var_61])[var_48][Test_Class_2.var_31 = arg_1.var_25]).var_39 - 7.256386549028811E307 ? 
new Test_Class_0() : ((new Test_Class_0[arg_0.var_25][var_48][var_48])[arg_0.var_25])[arg_0.var_25][Test_Class_2.var_31 = arg_1.var_25]; return 'U'; } protected static Test_Class_1 func_1(final short arg_0, long arg_1) { --new Test_Class_0().var_2; "xb".length(); if ((Test_Class_3.var_40 ^= (Test_Class_2.var_32 = Test_Class_0.var_1) == 1.2609472E38F) ? (Test_Class_3.var_40 = (Test_Class_3.var_40 = Test_Class_3.var_40)) : true) { --Test_Class_1.var_26; } else { "ybbe".substring(209378562, var_52 = (Test_Class_1.var_21 |= (Test_Class_2.var_31 = (byte)'a'))); } Test_Class_3.var_40 &= (Test_Class_3.var_40 &= true) && (Test_Class_1.var_29 = (Test_Class_1.var_29 = Test_Class_0.var_3)).endsWith(Test_Class_0.var_3); (false ? new Test_Class_0() : new Test_Class_0()).var_2 >>= new Test_Class_1().var_25; return 9.430116214455637E307 <= (true ? (Test_Class_3)new Test_Class_2() : (Test_Class_3)new Test_Class_2()).var_34 ? new Test_Class_1() : new Test_Class_1(); } public String toString() { String result = "[\n"; result += "Test_Class_5.var_50 = "; result += Test.Printer.print(var_50); result += "\n"; result += "Test_Class_5.var_60 = "; result += Test.Printer.print(var_60); result += "\n"; result += "Test_Class_5.var_52 = "; result += Test.Printer.print(var_52); result += "\n"; result += "Test_Class_5.var_53 = "; result += Test.Printer.print(var_53); result += "\n"; result += "Test_Class_5.var_47 = "; result += Test.Printer.print(var_47); result += "\n"; result += "Test_Class_5.var_49 = "; result += Test.Printer.print(var_49); result += "\n"; result += "Test_Class_5.var_48 = "; result += Test.Printer.print(var_48); result += "\n"; result += "Test_Class_5.var_61 = "; result += Test.Printer.print(var_61); result += "\n"; result += "Test_Class_5.var_51 = "; result += Test.Printer.print(var_51); result += "\n"; result += "Test_Class_5.var_54 = "; result += Test.Printer.print(var_54); result += ""; result += "\n]"; return result; } } public class Test { Test_Class_4 var_66; 
Test_Class_3 var_67; Test_Class_5 var_68; Test_Class_2[] var_69; long var_70 = ++Test_Class_1.var_26 & Test_Class_1.var_21++; final static double var_71 = 3.566207721984698E307; static boolean var_72; final static String var_73 = "nmxx"; private final char func_0(Test_Class_3 arg_0, final boolean[] arg_1) { ((Test_Class_5)(arg_1[arg_0.var_39++] ? new Test_Class_2[(var_67 = arg_0).var_39] : (new Object[arg_0.var_39])[arg_0.var_39])).var_54 = new Test_Class_1(); new Test_Class_0(); (((new Test[arg_0.var_39][arg_0.var_39][arg_0.var_39])[++arg_0.var_39])[arg_0.var_39][arg_0.var_39]).var_66 = (var_68 = (new Test_Class_5[arg_0.var_39][arg_0.var_39])[arg_0.var_39][arg_0.var_39]); ((new Test[arg_0.var_39])[(arg_0 = (var_67 = (arg_0 = arg_0))).var_39]).var_70 = ((new long[arg_0.var_39][arg_0.var_39])[arg_0.var_39])[arg_0.var_39 = ((var_67 = (arg_0 = arg_0)).var_39 -= new Test_Class_0().var_2)] << ']'; arg_0 = (new Test_Class_0().var_2 *= ((new Test_Class_2[arg_0.var_39])[arg_0.var_39]).var_34) >= arg_0.var_39 ? (var_67 = arg_0) : (arg_0 = arg_0); Test_Class_1.var_26--; Test_Class_4 var_74 = var_66 = (Test_Class_5)(new Test_Class_4[arg_0.var_39])[arg_0.var_39]; Test_Class_3.var_40 ^= ! (Test_Class_3.var_40 &= (Test_Class_3.var_40 ^= Test_Class_3.var_40) | (Test_Class_3.var_40 &= Test_Class_3.var_40)); var_72 = (arg_1[(var_67 = arg_0).var_39] | !Test_Class_3.var_40 & !Test_Class_3.var_40 ? 
(Test_Class_1.var_29 = var_73).endsWith((var_66 = var_74).var_51) && (Test_Class_3.var_40 ^= Test_Class_3.var_40) : (Test_Class_3.var_40 ^= Test_Class_3.var_40)) ^ !Test_Class_3.var_40; Test_Class_3.var_40 &= (Test_Class_3.var_40 &= (Test_Class_3.var_40 = Test_Class_3.var_40) & Test_Class_3.var_40 ^ Test_Class_3.var_40); arg_0.var_39 -= --var_70; int var_75; double var_76; { boolean var_77; var_70 ^= new Test_Class_0().var_2++; } Test_Class_1.var_26 /= Test_Class_0.var_3.lastIndexOf(~new Test_Class_1().var_25, Test_Class_1.var_21); Test_Class_1.var_26 |= Test_Class_1.var_21; (((new Test_Class_3[arg_0.var_39][arg_0.var_39][var_74.var_48])[arg_0.var_39])[arg_0.var_39][arg_0.var_39]).var_34 %= (var_67 = arg_0).var_39; Test_Class_1.var_21 &= arg_0.var_39; var_68 = (var_68 = (Test_Class_5)var_74); var_72 = false; return new Test_Class_5().var_60 ^= 'v'; } public static Test_Class_2 func_1(byte[][] arg_0, final int arg_1, Test_Class_1 arg_2, final Test_Class_1 arg_3) { ((new Test[arg_3.var_25])[((Test_Class_3)new Test_Class_2()).var_39 *= --Test_Class_1.var_26]).var_67 = (((new Test[arg_2.var_25])[(((new Test[arg_2.var_25][arg_2.var_25])[arg_3.var_25][arg_3.var_25]).var_67 = (new Test_Class_3[arg_2.var_25][arg_2.var_25])[arg_2.var_25][arg_3.var_25]).var_39 %= Test_Class_1.var_26]).var_67 = (((new Test[arg_3.var_25][arg_2.var_25])[arg_3.var_25][arg_2.var_25]).var_67 = (((new Test[arg_3.var_25])[arg_2.var_25]).var_67 = (Test_Class_3)new Test_Class_2()))); { --Test_Class_1.var_26; } if (!Test_Class_3.var_40) { "jfqj".replaceAll("ac", Test_Class_0.var_3); } else { arg_2 = (((new Test_Class_5[arg_3.var_25][arg_2.var_25])[((new Test_Class_3[arg_2.var_25])[arg_3.var_25]).var_39][((Test_Class_3)(new Test_Class_2[arg_2.var_25])[arg_3.var_25]).var_39]).var_54 = arg_3); new Test_Class_1(); } if (true) { Test_Class_0.func_0(); } else { Test_Class_1.var_23 /= Test_Class_1.var_26; } Test_Class_1.var_26--; Test_Class_1.var_23 ^= Test_Class_0.var_1; return new Test_Class_2(); } public 
static String execute() { try { Test t = new Test(); try { t.test(); } catch(Throwable e) { } try { return t.toString(); } catch (Throwable e) { return "Error during result conversion to String"; } } catch (Throwable e) { return "Error during test execution"; } } public static void main(String[] args) { try { Test t = new Test(); try { t.test(); } catch(Throwable e) { } try { System.out.println(t); } catch(Throwable e) { } } catch (Throwable e) { } } private void test() { double var_78 = 0; --Test_Class_1.var_26; long var_79; for (var_70 /= 8.089457748637276E307; var_78 < 162 && !true & (true ? Test_Class_3.var_40 : (Test_Class_3.var_40 ^= Test_Class_3.var_40)); Test_Class_1.var_26 -= 1.2513521E38F) { short var_80 = 10682; Test_Class_1.var_21--; var_78++; var_72 = (Test_Class_3.var_40 |= (Test_Class_3.var_40 ^= false)); ++Test_Class_1.var_26; } Test_Class_2 var_81; new Test_Class_4(); int var_82 = 0; ++Test_Class_1.var_23; do { --Test_Class_1.var_26; var_82++; ++Test_Class_1.var_21; } while ((Test_Class_3.var_40 ^= false & false) && var_82 < 256); Test_Class_1.var_23 |= (var_68 = (var_68 = (Test_Class_5)(var_66 = new Test_Class_4()))).var_48 + (Test_Class_1.var_26 >>> new Test_Class_0().var_2); (true ? 
new Test_Class_5() : (var_68 = (var_68 = new Test_Class_5()))).var_60 *= Test_Class_0.var_1; } public String toString() { String result = "[\n"; result += "Test.var_69 = "; result += Printer.print(var_69); result += "\n"; result += "Test.var_70 = "; result += Printer.print(var_70); result += "\n"; result += "Test.var_71 = "; result += Printer.print(var_71); result += "\n"; result += "Test.var_73 = "; result += Printer.print(var_73); result += "\n"; result += "Test.var_68 = "; result += Printer.print(var_68); result += "\n"; result += "Test.var_66 = "; result += Printer.print(var_66); result += "\n"; result += "Test.var_72 = "; result += Printer.print(var_72); result += "\n"; result += "Test.var_67 = "; result += Printer.print(var_67); result += ""; result += "\n]"; return result; } static class Printer { public static String print(boolean arg) { return String.valueOf(arg); } public static String print(byte arg) { return String.valueOf(arg); } public static String print(short arg) { return String.valueOf(arg); } public static String print(char arg) { return String.valueOf((int)arg); } public static String print(int arg) { return String.valueOf(arg); } public static String print(long arg) { return String.valueOf(arg); } public static String print(float arg) { return String.valueOf(arg); } public static String print(double arg) { return String.valueOf(arg); } public static String print(Object arg) { return print_r(new java.util.Stack(), arg); } private static String print_r(java.util.Stack visitedObjects, Object arg) { String result = ""; if (arg == null) result += "null"; else if (arg.getClass().isArray()) { for (int i = 0; i < visitedObjects.size(); i++) if (visitedObjects.elementAt(i) == arg) return "<recursive>"; visitedObjects.push(arg); final String delimiter = ", "; result += "["; if (arg instanceof Object[]) { Object[] array = (Object[]) arg; for (int i = 0; i < array.length; i++) { result += print_r(visitedObjects, array[i]); if (i < array.length - 1) result 
+= delimiter; } } else if (arg instanceof boolean[]) { boolean[] array = (boolean[]) arg; for (int i = 0; i < array.length; i++) { result += print(array[i]); if (i < array.length - 1) result += delimiter; } } else if (arg instanceof byte[]) { byte[] array = (byte[]) arg; for (int i = 0; i < array.length; i++) { result += print(array[i]); if (i < array.length - 1) result += delimiter; } } else if (arg instanceof short[]) { short[] array = (short[]) arg; for (int i = 0; i < array.length; i++) { result += print(array[i]); if (i < array.length - 1) result += delimiter; } } else if (arg instanceof char[]) { char[] array = (char[]) arg; for (int i = 0; i < array.length; i++) { result += print(array[i]); if (i < array.length - 1) result += delimiter; } } else if (arg instanceof int[]) { int[] array = (int[]) arg; for (int i = 0; i < array.length; i++) { result += print(array[i]); if (i < array.length - 1) result += delimiter; } } else if (arg instanceof long[]) { long[] array = (long[]) arg; for (int i = 0; i < array.length; i++) { result += print(array[i]); if (i < array.length - 1) result += delimiter; } } else if (arg instanceof float[]) { float[] array = (float[]) arg; for (int i = 0; i < array.length; i++) { result += print(array[i]); if (i < array.length - 1) result += delimiter; } } else if (arg instanceof double[]) { double[] array = (double[]) arg; for (int i = 0; i < array.length; i++) { result += print(array[i]); if (i < array.length - 1) result += delimiter; } } result += "]"; visitedObjects.pop(); } else { result += arg.toString(); } return result; } } }
googleapis/google-cloud-java
37,098
java-apihub/proto-google-cloud-apihub-v1/src/main/java/com/google/cloud/apihub/v1/HttpOperation.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/apihub/v1/common_fields.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.apihub.v1; /** * * * <pre> * The HTTP Operation. * </pre> * * Protobuf type {@code google.cloud.apihub.v1.HttpOperation} */ public final class HttpOperation extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.apihub.v1.HttpOperation) HttpOperationOrBuilder { private static final long serialVersionUID = 0L; // Use HttpOperation.newBuilder() to construct. 
private HttpOperation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private HttpOperation() { method_ = 0; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new HttpOperation(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.apihub.v1.CommonFieldsProto .internal_static_google_cloud_apihub_v1_HttpOperation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.apihub.v1.CommonFieldsProto .internal_static_google_cloud_apihub_v1_HttpOperation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.apihub.v1.HttpOperation.class, com.google.cloud.apihub.v1.HttpOperation.Builder.class); } /** * * * <pre> * Enumeration of Method types. * </pre> * * Protobuf enum {@code google.cloud.apihub.v1.HttpOperation.Method} */ public enum Method implements com.google.protobuf.ProtocolMessageEnum { /** * * * <pre> * Method unspecified. * </pre> * * <code>METHOD_UNSPECIFIED = 0;</code> */ METHOD_UNSPECIFIED(0), /** * * * <pre> * Get Operation type. * </pre> * * <code>GET = 1;</code> */ GET(1), /** * * * <pre> * Put Operation type. * </pre> * * <code>PUT = 2;</code> */ PUT(2), /** * * * <pre> * Post Operation type. * </pre> * * <code>POST = 3;</code> */ POST(3), /** * * * <pre> * Delete Operation type. * </pre> * * <code>DELETE = 4;</code> */ DELETE(4), /** * * * <pre> * Options Operation type. * </pre> * * <code>OPTIONS = 5;</code> */ OPTIONS(5), /** * * * <pre> * Head Operation type. * </pre> * * <code>HEAD = 6;</code> */ HEAD(6), /** * * * <pre> * Patch Operation type. * </pre> * * <code>PATCH = 7;</code> */ PATCH(7), /** * * * <pre> * Trace Operation type. * </pre> * * <code>TRACE = 8;</code> */ TRACE(8), UNRECOGNIZED(-1), ; /** * * * <pre> * Method unspecified. 
* </pre> * * <code>METHOD_UNSPECIFIED = 0;</code> */ public static final int METHOD_UNSPECIFIED_VALUE = 0; /** * * * <pre> * Get Operation type. * </pre> * * <code>GET = 1;</code> */ public static final int GET_VALUE = 1; /** * * * <pre> * Put Operation type. * </pre> * * <code>PUT = 2;</code> */ public static final int PUT_VALUE = 2; /** * * * <pre> * Post Operation type. * </pre> * * <code>POST = 3;</code> */ public static final int POST_VALUE = 3; /** * * * <pre> * Delete Operation type. * </pre> * * <code>DELETE = 4;</code> */ public static final int DELETE_VALUE = 4; /** * * * <pre> * Options Operation type. * </pre> * * <code>OPTIONS = 5;</code> */ public static final int OPTIONS_VALUE = 5; /** * * * <pre> * Head Operation type. * </pre> * * <code>HEAD = 6;</code> */ public static final int HEAD_VALUE = 6; /** * * * <pre> * Patch Operation type. * </pre> * * <code>PATCH = 7;</code> */ public static final int PATCH_VALUE = 7; /** * * * <pre> * Trace Operation type. * </pre> * * <code>TRACE = 8;</code> */ public static final int TRACE_VALUE = 8; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static Method valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. 
*/ public static Method forNumber(int value) { switch (value) { case 0: return METHOD_UNSPECIFIED; case 1: return GET; case 2: return PUT; case 3: return POST; case 4: return DELETE; case 5: return OPTIONS; case 6: return HEAD; case 7: return PATCH; case 8: return TRACE; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<Method> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap<Method> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<Method>() { public Method findValueByNumber(int number) { return Method.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.cloud.apihub.v1.HttpOperation.getDescriptor().getEnumTypes().get(0); } private static final Method[] VALUES = values(); public static Method valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private Method(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.cloud.apihub.v1.HttpOperation.Method) } private int bitField0_; public static final int PATH_FIELD_NUMBER = 1; private com.google.cloud.apihub.v1.Path path_; /** * * * <pre> * Optional. The path details for the Operation. 
* Note: Even though this field is optional, it is required for * [CreateApiOperation][google.cloud.apihub.v1.ApiHub.CreateApiOperation] * API and we will fail the request if not provided. * </pre> * * <code>.google.cloud.apihub.v1.Path path = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return Whether the path field is set. */ @java.lang.Override public boolean hasPath() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Optional. The path details for the Operation. * Note: Even though this field is optional, it is required for * [CreateApiOperation][google.cloud.apihub.v1.ApiHub.CreateApiOperation] * API and we will fail the request if not provided. * </pre> * * <code>.google.cloud.apihub.v1.Path path = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The path. */ @java.lang.Override public com.google.cloud.apihub.v1.Path getPath() { return path_ == null ? com.google.cloud.apihub.v1.Path.getDefaultInstance() : path_; } /** * * * <pre> * Optional. The path details for the Operation. * Note: Even though this field is optional, it is required for * [CreateApiOperation][google.cloud.apihub.v1.ApiHub.CreateApiOperation] * API and we will fail the request if not provided. * </pre> * * <code>.google.cloud.apihub.v1.Path path = 1 [(.google.api.field_behavior) = OPTIONAL];</code> */ @java.lang.Override public com.google.cloud.apihub.v1.PathOrBuilder getPathOrBuilder() { return path_ == null ? com.google.cloud.apihub.v1.Path.getDefaultInstance() : path_; } public static final int METHOD_FIELD_NUMBER = 2; private int method_ = 0; /** * * * <pre> * Optional. Operation method * Note: Even though this field is optional, it is required for * [CreateApiOperation][google.cloud.apihub.v1.ApiHub.CreateApiOperation] * API and we will fail the request if not provided. 
* </pre> * * <code> * .google.cloud.apihub.v1.HttpOperation.Method method = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The enum numeric value on the wire for method. */ @java.lang.Override public int getMethodValue() { return method_; } /** * * * <pre> * Optional. Operation method * Note: Even though this field is optional, it is required for * [CreateApiOperation][google.cloud.apihub.v1.ApiHub.CreateApiOperation] * API and we will fail the request if not provided. * </pre> * * <code> * .google.cloud.apihub.v1.HttpOperation.Method method = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The method. */ @java.lang.Override public com.google.cloud.apihub.v1.HttpOperation.Method getMethod() { com.google.cloud.apihub.v1.HttpOperation.Method result = com.google.cloud.apihub.v1.HttpOperation.Method.forNumber(method_); return result == null ? com.google.cloud.apihub.v1.HttpOperation.Method.UNRECOGNIZED : result; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getPath()); } if (method_ != com.google.cloud.apihub.v1.HttpOperation.Method.METHOD_UNSPECIFIED.getNumber()) { output.writeEnum(2, method_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getPath()); } if (method_ != com.google.cloud.apihub.v1.HttpOperation.Method.METHOD_UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(2, method_); } 
size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.apihub.v1.HttpOperation)) { return super.equals(obj); } com.google.cloud.apihub.v1.HttpOperation other = (com.google.cloud.apihub.v1.HttpOperation) obj; if (hasPath() != other.hasPath()) return false; if (hasPath()) { if (!getPath().equals(other.getPath())) return false; } if (method_ != other.method_) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasPath()) { hash = (37 * hash) + PATH_FIELD_NUMBER; hash = (53 * hash) + getPath().hashCode(); } hash = (37 * hash) + METHOD_FIELD_NUMBER; hash = (53 * hash) + method_; hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.apihub.v1.HttpOperation parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.apihub.v1.HttpOperation parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.apihub.v1.HttpOperation parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.apihub.v1.HttpOperation parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.apihub.v1.HttpOperation parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.apihub.v1.HttpOperation parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.apihub.v1.HttpOperation parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.apihub.v1.HttpOperation parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.apihub.v1.HttpOperation parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.apihub.v1.HttpOperation parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.apihub.v1.HttpOperation parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.apihub.v1.HttpOperation parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder 
newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.apihub.v1.HttpOperation prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The HTTP Operation. * </pre> * * Protobuf type {@code google.cloud.apihub.v1.HttpOperation} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.apihub.v1.HttpOperation) com.google.cloud.apihub.v1.HttpOperationOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.apihub.v1.CommonFieldsProto .internal_static_google_cloud_apihub_v1_HttpOperation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.apihub.v1.CommonFieldsProto .internal_static_google_cloud_apihub_v1_HttpOperation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.apihub.v1.HttpOperation.class, com.google.cloud.apihub.v1.HttpOperation.Builder.class); } // Construct using com.google.cloud.apihub.v1.HttpOperation.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getPathFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); 
bitField0_ = 0; path_ = null; if (pathBuilder_ != null) { pathBuilder_.dispose(); pathBuilder_ = null; } method_ = 0; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.apihub.v1.CommonFieldsProto .internal_static_google_cloud_apihub_v1_HttpOperation_descriptor; } @java.lang.Override public com.google.cloud.apihub.v1.HttpOperation getDefaultInstanceForType() { return com.google.cloud.apihub.v1.HttpOperation.getDefaultInstance(); } @java.lang.Override public com.google.cloud.apihub.v1.HttpOperation build() { com.google.cloud.apihub.v1.HttpOperation result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.apihub.v1.HttpOperation buildPartial() { com.google.cloud.apihub.v1.HttpOperation result = new com.google.cloud.apihub.v1.HttpOperation(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.apihub.v1.HttpOperation result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.path_ = pathBuilder_ == null ? 
path_ : pathBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.method_ = method_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.apihub.v1.HttpOperation) { return mergeFrom((com.google.cloud.apihub.v1.HttpOperation) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.apihub.v1.HttpOperation other) { if (other == com.google.cloud.apihub.v1.HttpOperation.getDefaultInstance()) return this; if (other.hasPath()) { mergePath(other.getPath()); } if (other.method_ != 0) { setMethodValue(other.getMethodValue()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new 
java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getPathFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 16: { method_ = input.readEnum(); bitField0_ |= 0x00000002; break; } // case 16 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.apihub.v1.Path path_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.apihub.v1.Path, com.google.cloud.apihub.v1.Path.Builder, com.google.cloud.apihub.v1.PathOrBuilder> pathBuilder_; /** * * * <pre> * Optional. The path details for the Operation. * Note: Even though this field is optional, it is required for * [CreateApiOperation][google.cloud.apihub.v1.ApiHub.CreateApiOperation] * API and we will fail the request if not provided. * </pre> * * <code>.google.cloud.apihub.v1.Path path = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return Whether the path field is set. */ public boolean hasPath() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Optional. The path details for the Operation. * Note: Even though this field is optional, it is required for * [CreateApiOperation][google.cloud.apihub.v1.ApiHub.CreateApiOperation] * API and we will fail the request if not provided. * </pre> * * <code>.google.cloud.apihub.v1.Path path = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The path. */ public com.google.cloud.apihub.v1.Path getPath() { if (pathBuilder_ == null) { return path_ == null ? 
com.google.cloud.apihub.v1.Path.getDefaultInstance() : path_; } else { return pathBuilder_.getMessage(); } } /** * * * <pre> * Optional. The path details for the Operation. * Note: Even though this field is optional, it is required for * [CreateApiOperation][google.cloud.apihub.v1.ApiHub.CreateApiOperation] * API and we will fail the request if not provided. * </pre> * * <code>.google.cloud.apihub.v1.Path path = 1 [(.google.api.field_behavior) = OPTIONAL];</code> */ public Builder setPath(com.google.cloud.apihub.v1.Path value) { if (pathBuilder_ == null) { if (value == null) { throw new NullPointerException(); } path_ = value; } else { pathBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Optional. The path details for the Operation. * Note: Even though this field is optional, it is required for * [CreateApiOperation][google.cloud.apihub.v1.ApiHub.CreateApiOperation] * API and we will fail the request if not provided. * </pre> * * <code>.google.cloud.apihub.v1.Path path = 1 [(.google.api.field_behavior) = OPTIONAL];</code> */ public Builder setPath(com.google.cloud.apihub.v1.Path.Builder builderForValue) { if (pathBuilder_ == null) { path_ = builderForValue.build(); } else { pathBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Optional. The path details for the Operation. * Note: Even though this field is optional, it is required for * [CreateApiOperation][google.cloud.apihub.v1.ApiHub.CreateApiOperation] * API and we will fail the request if not provided. 
* </pre> * * <code>.google.cloud.apihub.v1.Path path = 1 [(.google.api.field_behavior) = OPTIONAL];</code> */ public Builder mergePath(com.google.cloud.apihub.v1.Path value) { if (pathBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && path_ != null && path_ != com.google.cloud.apihub.v1.Path.getDefaultInstance()) { getPathBuilder().mergeFrom(value); } else { path_ = value; } } else { pathBuilder_.mergeFrom(value); } if (path_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Optional. The path details for the Operation. * Note: Even though this field is optional, it is required for * [CreateApiOperation][google.cloud.apihub.v1.ApiHub.CreateApiOperation] * API and we will fail the request if not provided. * </pre> * * <code>.google.cloud.apihub.v1.Path path = 1 [(.google.api.field_behavior) = OPTIONAL];</code> */ public Builder clearPath() { bitField0_ = (bitField0_ & ~0x00000001); path_ = null; if (pathBuilder_ != null) { pathBuilder_.dispose(); pathBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Optional. The path details for the Operation. * Note: Even though this field is optional, it is required for * [CreateApiOperation][google.cloud.apihub.v1.ApiHub.CreateApiOperation] * API and we will fail the request if not provided. * </pre> * * <code>.google.cloud.apihub.v1.Path path = 1 [(.google.api.field_behavior) = OPTIONAL];</code> */ public com.google.cloud.apihub.v1.Path.Builder getPathBuilder() { bitField0_ |= 0x00000001; onChanged(); return getPathFieldBuilder().getBuilder(); } /** * * * <pre> * Optional. The path details for the Operation. * Note: Even though this field is optional, it is required for * [CreateApiOperation][google.cloud.apihub.v1.ApiHub.CreateApiOperation] * API and we will fail the request if not provided. 
* </pre> * * <code>.google.cloud.apihub.v1.Path path = 1 [(.google.api.field_behavior) = OPTIONAL];</code> */ public com.google.cloud.apihub.v1.PathOrBuilder getPathOrBuilder() { if (pathBuilder_ != null) { return pathBuilder_.getMessageOrBuilder(); } else { return path_ == null ? com.google.cloud.apihub.v1.Path.getDefaultInstance() : path_; } } /** * * * <pre> * Optional. The path details for the Operation. * Note: Even though this field is optional, it is required for * [CreateApiOperation][google.cloud.apihub.v1.ApiHub.CreateApiOperation] * API and we will fail the request if not provided. * </pre> * * <code>.google.cloud.apihub.v1.Path path = 1 [(.google.api.field_behavior) = OPTIONAL];</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.apihub.v1.Path, com.google.cloud.apihub.v1.Path.Builder, com.google.cloud.apihub.v1.PathOrBuilder> getPathFieldBuilder() { if (pathBuilder_ == null) { pathBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.apihub.v1.Path, com.google.cloud.apihub.v1.Path.Builder, com.google.cloud.apihub.v1.PathOrBuilder>( getPath(), getParentForChildren(), isClean()); path_ = null; } return pathBuilder_; } private int method_ = 0; /** * * * <pre> * Optional. Operation method * Note: Even though this field is optional, it is required for * [CreateApiOperation][google.cloud.apihub.v1.ApiHub.CreateApiOperation] * API and we will fail the request if not provided. * </pre> * * <code> * .google.cloud.apihub.v1.HttpOperation.Method method = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The enum numeric value on the wire for method. */ @java.lang.Override public int getMethodValue() { return method_; } /** * * * <pre> * Optional. Operation method * Note: Even though this field is optional, it is required for * [CreateApiOperation][google.cloud.apihub.v1.ApiHub.CreateApiOperation] * API and we will fail the request if not provided. 
* </pre> * * <code> * .google.cloud.apihub.v1.HttpOperation.Method method = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @param value The enum numeric value on the wire for method to set. * @return This builder for chaining. */ public Builder setMethodValue(int value) { method_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. Operation method * Note: Even though this field is optional, it is required for * [CreateApiOperation][google.cloud.apihub.v1.ApiHub.CreateApiOperation] * API and we will fail the request if not provided. * </pre> * * <code> * .google.cloud.apihub.v1.HttpOperation.Method method = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The method. */ @java.lang.Override public com.google.cloud.apihub.v1.HttpOperation.Method getMethod() { com.google.cloud.apihub.v1.HttpOperation.Method result = com.google.cloud.apihub.v1.HttpOperation.Method.forNumber(method_); return result == null ? com.google.cloud.apihub.v1.HttpOperation.Method.UNRECOGNIZED : result; } /** * * * <pre> * Optional. Operation method * Note: Even though this field is optional, it is required for * [CreateApiOperation][google.cloud.apihub.v1.ApiHub.CreateApiOperation] * API and we will fail the request if not provided. * </pre> * * <code> * .google.cloud.apihub.v1.HttpOperation.Method method = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @param value The method to set. * @return This builder for chaining. */ public Builder setMethod(com.google.cloud.apihub.v1.HttpOperation.Method value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; method_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * Optional. Operation method * Note: Even though this field is optional, it is required for * [CreateApiOperation][google.cloud.apihub.v1.ApiHub.CreateApiOperation] * API and we will fail the request if not provided. 
* </pre> * * <code> * .google.cloud.apihub.v1.HttpOperation.Method method = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return This builder for chaining. */ public Builder clearMethod() { bitField0_ = (bitField0_ & ~0x00000002); method_ = 0; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.apihub.v1.HttpOperation) } // @@protoc_insertion_point(class_scope:google.cloud.apihub.v1.HttpOperation) private static final com.google.cloud.apihub.v1.HttpOperation DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.apihub.v1.HttpOperation(); } public static com.google.cloud.apihub.v1.HttpOperation getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<HttpOperation> PARSER = new com.google.protobuf.AbstractParser<HttpOperation>() { @java.lang.Override public HttpOperation parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<HttpOperation> 
parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<HttpOperation> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.apihub.v1.HttpOperation getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/harmony
35,633
classlib/modules/archive/src/test/java/org/apache/harmony/archive/tests/java/util/zip/InflaterTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.harmony.archive.tests.java.util.zip; import java.io.BufferedInputStream; import java.io.ByteArrayOutputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.util.zip.Adler32; import java.util.zip.DataFormatException; import java.util.zip.Deflater; import java.util.zip.DeflaterOutputStream; import java.util.zip.Inflater; import java.util.zip.ZipException; import tests.support.resource.Support_Resources; public class InflaterTest extends junit.framework.TestCase { byte outPutBuff1[] = new byte[500]; byte outPutDiction[] = new byte[500]; /** * @tests java.util.zip.Inflater#end() */ public void test_end() { // test method of java.util.zip.inflater.end() byte byteArray[] = { 5, 2, 3, 7, 8 }; int r = 0; Inflater inflate = new Inflater(); inflate.setInput(byteArray); inflate.end(); try { inflate.reset(); inflate.setInput(byteArray); } catch (NullPointerException e) { r = 1; } assertEquals("inflate can still be used after end is called", 1, r); Inflater i = new Inflater(); i.end(); // check for exception i.end(); } /** * @tests java.util.zip.Inflater#finished() */ public void test_finished() { // test method of 
java.util.zip.inflater.finished() byte byteArray[] = { 1, 3, 4, 7, 8, 'e', 'r', 't', 'y', '5' }; Inflater inflate = new Inflater(false); byte outPutInf[] = new byte[500]; try { while (!(inflate.finished())) { if (inflate.needsInput()) { inflate.setInput(outPutBuff1); } inflate.inflate(outPutInf); } assertTrue( "the method finished() returned false when no more data needs to be decompressed", inflate.finished()); } catch (DataFormatException e) { fail("Invalid input to be decompressed"); } for (int i = 0; i < byteArray.length; i++) { assertEquals( "Final decompressed data does not equal the original data", outPutInf[i], byteArray[i]); } assertEquals("final decompressed data contained more bytes than original - finished()", 0, outPutInf[byteArray.length]); } /** * @tests java.util.zip.Inflater#getAdler() */ public void test_getAdler() { // test method of java.util.zip.inflater.getAdler() byte dictionaryArray[] = { 'e', 'r', 't', 'a', 'b', 2, 3 }; Inflater inflateDiction = new Inflater(); inflateDiction.setInput(outPutDiction); if (inflateDiction.needsDictionary() == true) { // getting the checkSum value through the Adler32 class Adler32 adl = new Adler32(); adl.update(dictionaryArray); long checkSumR = adl.getValue(); assertEquals( "the checksum value returned by getAdler() is not the same as the checksum returned by creating the adler32 instance", inflateDiction.getAdler(), checkSumR); } } /** * @tests java.util.zip.Inflater#getRemaining() */ public void test_getRemaining() { // test method of java.util.zip.inflater.getRemaining() byte byteArray[] = { 1, 3, 5, 6, 7 }; Inflater inflate = new Inflater(); assertEquals("upon creating an instance of inflate, getRemaining returned a non zero value", 0, inflate.getRemaining()); inflate.setInput(byteArray); assertTrue( "getRemaining returned zero when there is input in the input buffer", inflate.getRemaining() != 0); } /** * @tests java.util.zip.Inflater#getTotalIn() */ public void test_getTotalIn() { // test method of 
java.util.zip.inflater.getTotalIn() // creating the decompressed data byte outPutBuf[] = new byte[500]; byte byteArray[] = { 1, 3, 4, 7, 8 }; byte outPutInf[] = new byte[500]; int x = 0; Deflater deflate = new Deflater(1); deflate.setInput(byteArray); while (!(deflate.needsInput())) { x += deflate.deflate(outPutBuf, x, outPutBuf.length - x); } deflate.finish(); while (!(deflate.finished())) { x = x + deflate.deflate(outPutBuf, x, outPutBuf.length - x); } Inflater inflate = new Inflater(); try { while (!(inflate.finished())) { if (inflate.needsInput()) { inflate.setInput(outPutBuf); } inflate.inflate(outPutInf); } } catch (DataFormatException e) { fail("Input to inflate is invalid or corrupted - getTotalIn"); } // System.out.print(deflate.getTotalOut() + " " + inflate.getTotalIn()); assertEquals( "the total byte in outPutBuf did not equal the byte returned in getTotalIn", deflate.getTotalOut(), inflate.getTotalIn()); Inflater inflate2 = new Inflater(); int offSet = 0;// seems only can start as 0 int length = 4; try { // seems no while loops allowed if (inflate2.needsInput()) { inflate2.setInput(outPutBuff1, offSet, length); } inflate2.inflate(outPutInf); } catch (DataFormatException e) { fail("Input to inflate is invalid or corrupted - getTotalIn"); } // System.out.print(inflate2.getTotalIn() + " " + length); assertEquals( "total byte dictated by length did not equal byte returned in getTotalIn", length, inflate2.getTotalIn()); } /** * @tests java.util.zip.Inflater#getTotalOut() */ public void test_getTotalOut() { // test method of java.util.zip.inflater.Inflater() // creating the decompressed data byte outPutBuf[] = new byte[500]; byte byteArray[] = { 1, 3, 4, 7, 8 }; int y = 0; int x = 0; Deflater deflate = new Deflater(1); deflate.setInput(byteArray); while (!(deflate.needsInput())) { x += deflate.deflate(outPutBuf, x, outPutBuf.length - x); } deflate.finish(); while (!(deflate.finished())) { x = x + deflate.deflate(outPutBuf, x, outPutBuf.length - x); } Inflater 
inflate = new Inflater(); byte outPutInf[] = new byte[500]; try { while (!(inflate.finished())) { if (inflate.needsInput()) { inflate.setInput(outPutBuf); } y += inflate.inflate(outPutInf); } } catch (DataFormatException e) { fail("Input to inflate is invalid or corrupted - getTotalIn"); } assertEquals( "the sum of the bytes returned from inflate does not equal the bytes of getTotalOut()", inflate.getTotalOut(), y); assertEquals( "the total number of bytes to be compressed does not equal the total bytes decompressed", deflate.getTotalIn(), inflate.getTotalOut()); // testing inflate(byte,int,int) inflate.reset(); y = 0; int offSet = 0;// seems only can start as 0 int length = 4; try { while (!(inflate.finished())) { if (inflate.needsInput()) { inflate.setInput(outPutBuf); } y += inflate.inflate(outPutInf, offSet, length); } } catch (DataFormatException e) { System.out .println("Input to inflate is invalid or corrupted - getTotalIn"); } assertEquals( "the sum of the bytes returned from inflate does not equal the bytes of getTotalOut()", y, inflate.getTotalOut()); assertEquals( "the total number of bytes to be compressed does not equal the total bytes decompressed", deflate.getTotalIn(), inflate.getTotalOut()); } /** * @tests java.util.zip.Inflater#inflate(byte[]) */ public void test_inflate$B() { // test method of java.util.zip.inflater.inflate(byte) byte byteArray[] = { 1, 3, 4, 7, 8, 'e', 'r', 't', 'y', '5' }; byte outPutInf[] = new byte[500]; Inflater inflate = new Inflater(); try { while (!(inflate.finished())) { if (inflate.needsInput()) { inflate.setInput(outPutBuff1); } inflate.inflate(outPutInf); } } catch (DataFormatException e) { fail("Invalid input to be decompressed"); } for (int i = 0; i < byteArray.length; i++) { assertEquals( "Final decompressed data does not equal the original data", byteArray[i], outPutInf[i]); } assertEquals("final decompressed data contained more bytes than original - inflateB", 0, outPutInf[byteArray.length]); // testing for an 
empty input array byte outPutBuf[] = new byte[500]; byte emptyArray[] = new byte[11]; int x = 0; Deflater defEmpty = new Deflater(3); defEmpty.setInput(emptyArray); while (!(defEmpty.needsInput())) { x += defEmpty.deflate(outPutBuf, x, outPutBuf.length - x); } defEmpty.finish(); while (!(defEmpty.finished())) { x += defEmpty.deflate(outPutBuf, x, outPutBuf.length - x); } assertEquals( "the total number of byte from deflate did not equal getTotalOut - inflate(byte)", x, defEmpty.getTotalOut()); assertEquals( "the number of input byte from the array did not correspond with getTotalIn - inflate(byte)", emptyArray.length, defEmpty.getTotalIn()); Inflater infEmpty = new Inflater(); try { while (!(infEmpty.finished())) { if (infEmpty.needsInput()) { infEmpty.setInput(outPutBuf); } infEmpty.inflate(outPutInf); } } catch (DataFormatException e) { fail("Invalid input to be decompressed"); } for (int i = 0; i < emptyArray.length; i++) { assertEquals( "Final decompressed data does not equal the original data", emptyArray[i], outPutInf[i]); assertEquals("Final decompressed data does not equal zero", 0, outPutInf[i]); } assertEquals("Final decompressed data contains more element than original data", 0, outPutInf[emptyArray.length]); } public void test_inflate$B1() { byte codedData[] = { 120, -38, 75, -54, 73, -52, 80, 40, 46, 41, -54, -52, 75, 87, 72, -50, -49, 43, 73, -52, -52, 43, 86, 72, 2, 10, 34, 99, -123, -60, -68, 20, -80, 32, 0, -101, -69, 17, 84}; String codedString = "blah string contains blahblahblahblah and blah"; Inflater infl1 = new Inflater(); Inflater infl2 = new Inflater(); byte[] result = new byte[100]; int decLen = 0; infl1.setInput(codedData, 0, codedData.length); try { decLen = infl1.inflate(result); } catch (DataFormatException e) { fail("Unexpected DataFormatException"); } infl1.end(); assertEquals(codedString, new String(result, 0, decLen)); codedData[5] = 0; infl2.setInput(codedData, 0, codedData.length); try { decLen = infl2.inflate(result); 
fail("Expected DataFormatException"); } catch (DataFormatException e) { // expected } infl2.end(); } /** * @tests java.util.zip.Inflater#inflate(byte[], int, int) */ public void test_inflate$BII() { // test method of java.util.zip.inflater.inflate(byte,int,int) byte byteArray[] = { 1, 3, 4, 7, 8, 'e', 'r', 't', 'y', '5' }; byte outPutInf[] = new byte[100]; int y = 0; Inflater inflate = new Inflater(); try { while (!(inflate.finished())) { if (inflate.needsInput()) { assertEquals(0, inflate.inflate(outPutInf, 0, 1)); inflate.setInput(outPutBuff1); } y += inflate.inflate(outPutInf, y, outPutInf.length - y); } } catch (DataFormatException e) { fail("Invalid input to be decompressed"); } for (int i = 0; i < byteArray.length; i++) { assertEquals( "Final decompressed data does not equal the original data", byteArray[i], outPutInf[i]); } assertEquals("final decompressed data contained more bytes than original - inflateB", 0, outPutInf[byteArray.length]); // test boundary checks inflate.reset(); int r = 0; int offSet = 0; int lengthError = 101; try { if (inflate.needsInput()) { inflate.setInput(outPutBuff1); } inflate.inflate(outPutInf, offSet, lengthError); } catch (DataFormatException e) { fail("Invalid input to be decompressed"); } catch (ArrayIndexOutOfBoundsException e) { r = 1; } assertEquals("out of bounds error did not get caught", 1, r); try { assertEquals(0, inflate.inflate(outPutInf, offSet, 0)); } catch (DataFormatException e) { fail("Invalid input to be decompressed"); } inflate.end(); try { inflate.inflate(outPutInf, offSet, 1); fail("IllegalStateException expected"); } catch (DataFormatException e) { fail("Invalid input to be decompressed"); } catch (IllegalStateException e) { //expected } } public void test_inflate$BII1() { byte codedData[] = { 120, -38, 75, -54, 73, -52, 80, 40, 46, 41, -54, -52, 75, 87, 72, -50, -49, 43, 73, -52, -52, 43, 86, 72, 2, 10, 34, 99, -123, -60, -68, 20, -80, 32, 0, -101, -69, 17, 84}; String codedString = "blah string"; 
Inflater infl1 = new Inflater(); Inflater infl2 = new Inflater(); byte[] result = new byte[100]; int decLen = 0; infl1.setInput(codedData, 0, codedData.length); try { decLen = infl1.inflate(result, 10, 11); } catch (DataFormatException e) { fail("Unexpected DataFormatException"); } infl1.end(); assertEquals(codedString, new String(result, 10, decLen)); codedData[5] = 0; infl2.setInput(codedData, 0, codedData.length); try { decLen = infl2.inflate(result, 10, 11); fail("Expected DataFormatException"); } catch (DataFormatException e) { // expected } infl2.end(); } /* * Regression test for HARMONY-6637 */ public void testInflateZero() throws Exception { ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); DeflaterOutputStream deflaterOutputStream = new DeflaterOutputStream( byteArrayOutputStream); deflaterOutputStream.close(); byte[] input = byteArrayOutputStream.toByteArray(); Inflater inflater = new Inflater(); inflater.setInput(input); byte[] buffer = new byte[0]; int numRead = 0; while (!inflater.finished()) { int inflatedChunkSize = inflater.inflate(buffer, numRead, buffer.length - numRead); numRead += inflatedChunkSize; } inflater.end(); } /** * @tests java.util.zip.Inflater#Inflater() */ public void test_Constructor() { // test method of java.util.zip.inflater.Inflater() Inflater inflate = new Inflater(); assertNotNull("failed to create the instance of inflater", inflate); } /** * @tests java.util.zip.Inflater#Inflater(boolean) */ public void test_ConstructorZ() { // test method of java.util.zip.inflater.Inflater(boolean) // note does not throw exception if deflater has a header, but inflater // doesn't or vice versa. 
byte byteArray[] = { 1, 3, 4, 7, 8, 'e', 'r', 't', 'y', '5' }; Inflater inflate = new Inflater(true); assertNotNull("failed to create the instance of inflater", inflate); byte outPutInf[] = new byte[500]; int r = 0; try { while (!(inflate.finished())) { if (inflate.needsInput()) { inflate.setInput(outPutBuff1); } inflate.inflate(outPutInf); } for (int i = 0; i < byteArray.length; i++) { assertEquals("the output array from inflate should contain 0 because the header of inflate and deflate did not match, but this failed", 0, outPutBuff1[i]); } } catch (DataFormatException e) { r = 1; } assertEquals("Error: exception should be thrown because of header inconsistency", 1, r); } /** * @tests java.util.zip.Inflater#needsDictionary() */ public void test_needsDictionary() { // test method of java.util.zip.inflater.needsDictionary() // note: this flag is set after inflate is called byte outPutInf[] = new byte[500]; // testing with dictionary set. Inflater inflateDiction = new Inflater(); if (inflateDiction.needsInput()) { inflateDiction.setInput(outPutDiction); } try { assertEquals("should return 0 because needs dictionary", 0, inflateDiction.inflate(outPutInf)); } catch (DataFormatException e) { fail("Should not cause exception"); } assertTrue( "method needsDictionary returned false when dictionary was used in deflater", inflateDiction.needsDictionary()); // testing without dictionary Inflater inflate = new Inflater(); try { inflate.setInput(outPutBuff1); inflate.inflate(outPutInf); assertFalse( "method needsDictionary returned true when dictionary was not used in deflater", inflate.needsDictionary()); } catch (DataFormatException e) { fail( "Input to inflate is invalid or corrupted - needsDictionary"); } // Regression test for HARMONY-86 Inflater inf = new Inflater(); assertFalse(inf.needsDictionary()); assertEquals(0,inf.getTotalIn()); assertEquals(0,inf.getTotalOut()); assertEquals(0,inf.getBytesRead()); assertEquals(0,inf.getBytesWritten()); assertEquals(1, 
inf.getAdler()); } /** * @tests java.util.zip.Inflater#needsInput() */ public void test_needsInput() { // test method of java.util.zip.inflater.needsInput() Inflater inflate = new Inflater(); assertTrue( "needsInput give the wrong boolean value as a result of no input buffer", inflate.needsInput()); byte byteArray[] = { 2, 3, 4, 't', 'y', 'u', 'e', 'w', 7, 6, 5, 9 }; inflate.setInput(byteArray); assertFalse( "methodNeedsInput returned true when the input buffer is full", inflate.needsInput()); inflate.reset(); byte byteArrayEmpty[] = new byte[0]; inflate.setInput(byteArrayEmpty); assertTrue( "needsInput give wrong boolean value as a result of an empty input buffer", inflate.needsInput()); } /** * @tests java.util.zip.Inflater#reset() */ public void test_reset() { // test method of java.util.zip.inflater.reset() byte byteArray[] = { 1, 3, 4, 7, 8, 'e', 'r', 't', 'y', '5' }; byte outPutInf[] = new byte[100]; int y = 0; Inflater inflate = new Inflater(); try { while (!(inflate.finished())) { if (inflate.needsInput()) { inflate.setInput(outPutBuff1); } y += inflate.inflate(outPutInf, y, outPutInf.length - y); } } catch (DataFormatException e) { fail("Invalid input to be decompressed"); } for (int i = 0; i < byteArray.length; i++) { assertEquals( "Final decompressed data does not equal the original data", byteArray[i], outPutInf[i]); } assertEquals("final decompressed data contained more bytes than original - reset", 0, outPutInf[byteArray.length]); // testing that resetting the inflater will also return the correct // decompressed data inflate.reset(); try { while (!(inflate.finished())) { if (inflate.needsInput()) { inflate.setInput(outPutBuff1); } inflate.inflate(outPutInf); } } catch (DataFormatException e) { fail("Invalid input to be decompressed"); } for (int i = 0; i < byteArray.length; i++) { assertEquals( "Final decompressed data does not equal the original data", byteArray[i], outPutInf[i]); } assertEquals("final decompressed data contained more bytes than 
original - reset", 0, outPutInf[byteArray.length]); } /** * @tests java.util.zip.Inflater#setDictionary(byte[]) */ public void test_setDictionary$B() { //FIXME This test doesn't pass in Harmony classlib or Sun 5.0_7 RI /* // test method of java.util.zip.inflater.setDictionary(byte) byte dictionaryArray[] = { 'e', 'r', 't', 'a', 'b', 2, 3 }; byte byteArray[] = { 4, 5, 3, 2, 'a', 'b', 6, 7, 8, 9, 0, 's', '3', 'w', 'r' }; byte outPutInf[] = new byte[100]; // trying to inflate without setting a dictionary Inflater inflateWO = new Inflater(); byte outPutInf2[] = new byte[100]; int r = 0; try { while (!(inflateWO.finished())) { if (inflateWO.needsInput()) { inflateWO.setInput(outPutDiction); } inflateWO.inflate(outPutInf2); } } catch (DataFormatException e) { r = 1; } assertEquals("invalid input to be decompressed due to dictionary not set", 1, r); // now setting the dictionary in inflater Inflater inflate = new Inflater(); try { while (!(inflate.finished())) { if (inflate.needsInput()) { inflate.setInput(outPutDiction); } if (inflate.needsDictionary()) { inflate.setDictionary(dictionaryArray); } inflate.inflate(outPutInf); } } catch (DataFormatException e) { fail("Invalid input to be decompressed"); } for (int i = 0; i < byteArray.length; i++) { assertTrue( "Final decompressed data does not equal the original data", byteArray[i] == outPutInf[i]); } assertEquals("final decompressed data contained more bytes than original - deflateB", 0, outPutInf[byteArray.length]); */ } /** * @tests java.util.zip.Inflater#setInput(byte[]) */ public void test_setInput$B() { // test method of java.util.zip.inflater.setInput(byte) byte byteArray[] = { 2, 3, 4, 't', 'y', 'u', 'e', 'w', 7, 6, 5, 9 }; Inflater inflate = new Inflater(); inflate.setInput(byteArray); assertTrue("setInputB did not deliver any byte to the input buffer", inflate.getRemaining() != 0); } /** * @tests java.util.zip.Inflater#setInput(byte[], int, int) */ public void test_setInput$BII() { // test method of 
java.util.zip.inflater.setInput(byte,int,int) byte byteArray[] = { 2, 3, 4, 't', 'y', 'u', 'e', 'w', 7, 6, 5, 9 }; int offSet = 6; int length = 6; Inflater inflate = new Inflater(); inflate.setInput(byteArray, offSet, length); assertEquals( "setInputBII did not deliver the right number of bytes to the input buffer", length, inflate.getRemaining()); // boundary check inflate.reset(); int r = 0; try { inflate.setInput(byteArray, 100, 100); } catch (ArrayIndexOutOfBoundsException e) { r = 1; } assertEquals("boundary check is not present for setInput", 1, r); } @Override protected void setUp() { try { java.io.InputStream infile = Support_Resources .getStream("hyts_compressD.bin"); BufferedInputStream inflatIP = new BufferedInputStream(infile); inflatIP.read(outPutBuff1, 0, outPutBuff1.length); inflatIP.close(); java.io.InputStream infile2 = Support_Resources .getStream("hyts_compDiction.bin"); BufferedInputStream inflatIP2 = new BufferedInputStream(infile2); inflatIP2.read(outPutDiction, 0, outPutDiction.length); inflatIP2.close(); } catch (FileNotFoundException e) { fail( "input file to test InflaterInputStream constructor is not found"); } catch (ZipException e) { fail( "read() threw an zip exception while testing constructor"); } catch (IOException e) { fail("read() threw an exception while testing constructor"); } } @Override protected void tearDown() { } /** * @tests java.util.zip.Deflater#getBytesRead() */ public void test_getBytesRead() throws DataFormatException, UnsupportedEncodingException { // Regression test for HARMONY-158 Deflater def = new Deflater(); Inflater inf = new Inflater(); assertEquals(0, def.getTotalIn()); assertEquals(0, def.getTotalOut()); assertEquals(0, def.getBytesRead()); // Encode a String into bytes String inputString = "blahblahblah??"; byte[] input = inputString.getBytes("UTF-8"); // Compress the bytes byte[] output = new byte[100]; def.setInput(input); def.finish(); def.deflate(output); inf.setInput(output); int compressedDataLength 
=inf.inflate(input); assertEquals(16, inf.getTotalIn()); assertEquals(compressedDataLength, inf.getTotalOut()); assertEquals(16, inf.getBytesRead()); } /** * @tests java.util.zip.Deflater#getBytesRead() */ public void test_getBytesWritten() throws DataFormatException, UnsupportedEncodingException { // Regression test for HARMONY-158 Deflater def = new Deflater(); Inflater inf = new Inflater(); assertEquals(0, def.getTotalIn()); assertEquals(0, def.getTotalOut()); assertEquals(0, def.getBytesWritten()); // Encode a String into bytes String inputString = "blahblahblah??"; byte[] input = inputString.getBytes("UTF-8"); // Compress the bytes byte[] output = new byte[100]; def.setInput(input); def.finish(); def.deflate(output); inf.setInput(output); int compressedDataLength =inf.inflate(input); assertEquals(16, inf.getTotalIn()); assertEquals(compressedDataLength, inf.getTotalOut()); assertEquals(14, inf.getBytesWritten()); } /** * @tests java.util.zip.Deflater#inflate(byte[], int, int) */ public void testInflate() throws Exception { // Regression for HARMONY-81 Inflater inf = new Inflater(); int res = inf.inflate(new byte[0], 0, 0); assertEquals(0, res); // Regression for HARMONY-2508 Inflater inflater = new Inflater(); byte[] b = new byte[1024]; assertEquals(0, inflater.inflate(b)); inflater.end(); // Regression for HARMONY-2510 inflater = new Inflater(); inflater.setInput(new byte[] { -1 }); try { inflater.inflate(b); // The RI detects malformed data on the malformed input { -1 }. Both // this implementation and the native zlib API return "need input" // on that data. This is an error if the stream is exhausted, but // not one that results in an exception in the Inflater API. 
assertTrue(inflater.needsInput()); } catch (DataFormatException e) { // expected } inflater = new Inflater(); inflater.setInput(new byte[] { -1, -1, -1 }); try { inflater.inflate(b); } catch (DataFormatException e) { // expected } } public void testSetDictionary$B() throws Exception { int i = 0; String inputString = "blah string contains blahblahblahblah and blah"; String dictionary1 = "blah"; String dictionary2 = "1234"; byte[] outputNo = new byte[100]; byte[] output1 = new byte[100]; byte[] output2 = new byte[100]; Deflater defDictNo = new Deflater(9); Deflater defDict1 = new Deflater(9); Deflater defDict2 = new Deflater(9); defDict1.setDictionary(dictionary1.getBytes()); defDict2.setDictionary(dictionary2.getBytes()); defDictNo.setInput(inputString.getBytes()); defDict1.setInput(inputString.getBytes()); defDict2.setInput(inputString.getBytes()); defDictNo.finish(); defDict1.finish(); defDict2.finish(); int dataLenNo = defDictNo.deflate(outputNo); int dataLen1 = defDict1.deflate(output1); int dataLen2 = defDict2.deflate(output2); boolean passNo1 = false; boolean passNo2 = false; boolean pass12 = false; for (i = 0; i < (dataLenNo < dataLen1 ? dataLenNo : dataLen1); i++) { if (outputNo[i] != output1[i]) { passNo1 = true; break; } } for (i = 0; i < (dataLenNo < dataLen1 ? dataLenNo : dataLen2); i++) { if (outputNo[i] != output2[i]) { passNo2 = true; break; } } for (i = 0; i < (dataLen1 < dataLen2 ? 
dataLen1 : dataLen2); i++) { if (output1[i] != output2[i]) { pass12 = true; break; } } assertTrue( "Compressed data the same for stream with dictionary and without it.", passNo1); assertTrue( "Compressed data the same for stream with dictionary and without it.", passNo2); assertTrue( "Compressed data the same for stream with different dictionaries.", pass12); Inflater inflNo = new Inflater(); Inflater infl1 = new Inflater(); Inflater infl2 = new Inflater(); byte[] result = new byte[100]; int decLen; inflNo.setInput(outputNo, 0, dataLenNo); decLen = inflNo.inflate(result); assertFalse(inflNo.needsDictionary()); inflNo.end(); assertEquals(inputString, new String(result, 0, decLen)); infl1.setInput(output1, 0, dataLen1); decLen = infl1.inflate(result); assertTrue(infl1.needsDictionary()); infl1.setDictionary(dictionary1.getBytes()); decLen = infl1.inflate(result); infl1.end(); assertEquals(inputString, new String(result, 0, decLen)); infl2.setInput(output2, 0, dataLen2); decLen = infl2.inflate(result); assertTrue(infl2.needsDictionary()); infl2.setDictionary(dictionary2.getBytes()); decLen = infl2.inflate(result); infl2.end(); assertEquals(inputString, new String(result, 0, decLen)); inflNo = new Inflater(); infl1 = new Inflater(); inflNo.setInput(outputNo, 0, dataLenNo); try { infl1.setDictionary(dictionary1.getBytes()); fail("IllegalArgumentException expected."); } catch (IllegalArgumentException ee) { // expected. } inflNo.end(); infl1.setInput(output1, 0, dataLen1); decLen = infl1.inflate(result); assertTrue(infl1.needsDictionary()); try { infl1.setDictionary(dictionary2.getBytes()); fail("IllegalArgumentException expected."); } catch (IllegalArgumentException ee) { // expected. 
} infl1.end(); try{ infl1.setDictionary(dictionary2.getBytes()); fail("IllegalStateException expected"); }catch(IllegalStateException ise){ //expected } } public void testSetDictionary$BII() throws Exception { int i = 0; String inputString = "blah string contains blahblahblahblah and blah"; String dictionary1 = "blah"; String dictionary2 = "blahblahblah"; byte[] output1 = new byte[100]; byte[] output2 = new byte[100]; byte[] output3 = new byte[100]; Deflater defDict1 = new Deflater(9); Deflater defDict2 = new Deflater(9); Deflater defDict3 = new Deflater(9); defDict1.setDictionary(dictionary1.getBytes()); defDict2.setDictionary(dictionary2.getBytes()); defDict3.setDictionary(dictionary2.getBytes(), 4, 4); defDict1.setInput(inputString.getBytes()); defDict2.setInput(inputString.getBytes()); defDict3.setInput(inputString.getBytes()); defDict1.finish(); defDict2.finish(); defDict3.finish(); int dataLen1 = defDict1.deflate(output1); int dataLen2 = defDict2.deflate(output2); int dataLen3 = defDict3.deflate(output3); boolean pass12 = false; boolean pass23 = false; boolean pass13 = true; for (i = 0; i < (dataLen1 < dataLen2 ? dataLen1 : dataLen2); i++) { if (output1[i] != output2[i]) { pass12 = true; break; } } for (i = 0; i < (dataLen2 < dataLen3 ? dataLen2 : dataLen3); i++) { if (output2[i] != output3[i]) { pass23 = true; break; } } for (i = 0; i < (dataLen1 < dataLen3 ? 
dataLen1 : dataLen3); i++) { if (output1[i] != output3[i]) { pass13 = false; break; } } assertTrue( "Compressed data the same for stream with different dictionaries.", pass12); assertTrue( "Compressed data the same for stream with different dictionaries.", pass23); assertTrue( "Compressed data the differs for stream with the same dictionaries.", pass13); Inflater infl1 = new Inflater(); Inflater infl2 = new Inflater(); Inflater infl3 = new Inflater(); Inflater infl4 = new Inflater(); byte[] result = new byte[100]; int decLen; infl1.setInput(output1, 0, dataLen1); decLen = infl1.inflate(result); assertTrue(infl1.needsDictionary()); infl1.setDictionary(dictionary2.getBytes(), 4, 4); decLen = infl1.inflate(result); infl1.end(); assertEquals(inputString, new String(result, 0, decLen)); infl2.setInput(output2, 0, dataLen2); decLen = infl2.inflate(result); assertTrue(infl2.needsDictionary()); try { infl2.setDictionary(dictionary1.getBytes()); fail("IllegalArgumentException expected."); } catch (IllegalArgumentException ee) { // expected } infl2.end(); infl3.setInput(output3, 0, dataLen3); decLen = infl3.inflate(result); assertTrue(infl3.needsDictionary()); infl3.setDictionary(dictionary1.getBytes()); decLen = infl3.inflate(result); infl3.end(); assertEquals(inputString, new String(result, 0, decLen)); //exception test infl4.setInput(output3, 0, dataLen3); decLen = infl4.inflate(result); assertTrue(infl4.needsDictionary()); try{ infl4.setDictionary(dictionary1.getBytes(), 4, 4); fail("ArrayIndexOutOfBoundsException expected"); }catch(ArrayIndexOutOfBoundsException aiob){ //expected } } public void testExceptions() throws Exception { byte byteArray[] = { 5, 2, 3, 7, 8 }; int r = 0; Inflater inflate = new Inflater(); inflate.setInput(byteArray); inflate.end(); try{ inflate.getAdler(); fail("IllegalStateException expected"); }catch(IllegalStateException ise){ //expected } try{ inflate.getBytesRead(); fail("NullPointerException expected"); }catch(NullPointerException ise){ 
//expected } try{ inflate.getBytesWritten(); fail("NullPointerException expected"); }catch(NullPointerException ise){ //expected } try{ inflate.getTotalIn(); fail("IllegalStateException expected"); }catch(IllegalStateException ise){ //expected } try{ inflate.getTotalOut(); fail("IllegalStateException expected"); }catch(IllegalStateException ise){ //expected } } }
openjdk/jdk8
37,252
jdk/src/solaris/classes/java/util/prefs/FileSystemPreferences.java
/* * Copyright (c) 2000, 2011, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package java.util.prefs; import java.util.*; import java.io.*; import java.security.AccessController; import java.security.PrivilegedAction; import java.security.PrivilegedExceptionAction; import java.security.PrivilegedActionException; import sun.util.logging.PlatformLogger; /** * Preferences implementation for Unix. Preferences are stored in the file * system, with one directory per preferences node. All of the preferences * at each node are stored in a single file. Atomic file system operations * (e.g. File.renameTo) are used to ensure integrity. An in-memory cache of * the "explored" portion of the tree is maintained for performance, and * written back to the disk periodically. File-locking is used to ensure * reasonable behavior when multiple VMs are running at the same time. 
* (The file lock is obtained only for sync(), flush() and removeNode().) * * @author Josh Bloch * @see Preferences * @since 1.4 */ class FileSystemPreferences extends AbstractPreferences { /** * Sync interval in seconds. */ private static final int SYNC_INTERVAL = Math.max(1, Integer.parseInt( AccessController.doPrivileged( new sun.security.action.GetPropertyAction( "java.util.prefs.syncInterval", "30")))); /** * Returns logger for error messages. Backing store exceptions are logged at * WARNING level. */ private static PlatformLogger getLogger() { return PlatformLogger.getLogger("java.util.prefs"); } /** * Directory for system preferences. */ private static File systemRootDir; /* * Flag, indicating whether systemRoot directory is writable */ private static boolean isSystemRootWritable; /** * Directory for user preferences. */ private static File userRootDir; /* * Flag, indicating whether userRoot directory is writable */ private static boolean isUserRootWritable; /** * The user root. */ static Preferences userRoot = null; static synchronized Preferences getUserRoot() { if (userRoot == null) { setupUserRoot(); userRoot = new FileSystemPreferences(true); } return userRoot; } private static void setupUserRoot() { AccessController.doPrivileged(new PrivilegedAction<Void>() { public Void run() { userRootDir = new File(System.getProperty("java.util.prefs.userRoot", System.getProperty("user.home")), ".java/.userPrefs"); // Attempt to create root dir if it does not yet exist. if (!userRootDir.exists()) { if (userRootDir.mkdirs()) { try { chmod(userRootDir.getCanonicalPath(), USER_RWX); } catch (IOException e) { getLogger().warning("Could not change permissions" + " on userRoot directory. "); } getLogger().info("Created user preferences directory."); } else getLogger().warning("Couldn't create user preferences" + " directory. 
User preferences are unusable."); } isUserRootWritable = userRootDir.canWrite(); String USER_NAME = System.getProperty("user.name"); userLockFile = new File (userRootDir,".user.lock." + USER_NAME); userRootModFile = new File (userRootDir, ".userRootModFile." + USER_NAME); if (!userRootModFile.exists()) try { // create if does not exist. userRootModFile.createNewFile(); // Only user can read/write userRootModFile. int result = chmod(userRootModFile.getCanonicalPath(), USER_READ_WRITE); if (result !=0) getLogger().warning("Problem creating userRoot " + "mod file. Chmod failed on " + userRootModFile.getCanonicalPath() + " Unix error code " + result); } catch (IOException e) { getLogger().warning(e.toString()); } userRootModTime = userRootModFile.lastModified(); return null; } }); } /** * The system root. */ static Preferences systemRoot; static synchronized Preferences getSystemRoot() { if (systemRoot == null) { setupSystemRoot(); systemRoot = new FileSystemPreferences(false); } return systemRoot; } private static void setupSystemRoot() { AccessController.doPrivileged(new PrivilegedAction<Void>() { public Void run() { String systemPrefsDirName = System.getProperty("java.util.prefs.systemRoot","/etc/.java"); systemRootDir = new File(systemPrefsDirName, ".systemPrefs"); // Attempt to create root dir if it does not yet exist. if (!systemRootDir.exists()) { // system root does not exist in /etc/.java // Switching to java.home systemRootDir = new File(System.getProperty("java.home"), ".systemPrefs"); if (!systemRootDir.exists()) { if (systemRootDir.mkdirs()) { getLogger().info( "Created system preferences directory " + "in java.home."); try { chmod(systemRootDir.getCanonicalPath(), USER_RWX_ALL_RX); } catch (IOException e) { } } else { getLogger().warning("Could not create " + "system preferences directory. 
System " + "preferences are unusable."); } } } isSystemRootWritable = systemRootDir.canWrite(); systemLockFile = new File(systemRootDir, ".system.lock"); systemRootModFile = new File (systemRootDir,".systemRootModFile"); if (!systemRootModFile.exists() && isSystemRootWritable) try { // create if does not exist. systemRootModFile.createNewFile(); int result = chmod(systemRootModFile.getCanonicalPath(), USER_RW_ALL_READ); if (result !=0) getLogger().warning("Chmod failed on " + systemRootModFile.getCanonicalPath() + " Unix error code " + result); } catch (IOException e) { getLogger().warning(e.toString()); } systemRootModTime = systemRootModFile.lastModified(); return null; } }); } /** * Unix user write/read permission */ private static final int USER_READ_WRITE = 0600; private static final int USER_RW_ALL_READ = 0644; private static final int USER_RWX_ALL_RX = 0755; private static final int USER_RWX = 0700; /** * The lock file for the user tree. */ static File userLockFile; /** * The lock file for the system tree. */ static File systemLockFile; /** * Unix lock handle for userRoot. * Zero, if unlocked. */ private static int userRootLockHandle = 0; /** * Unix lock handle for systemRoot. * Zero, if unlocked. */ private static int systemRootLockHandle = 0; /** * The directory representing this preference node. There is no guarantee * that this directory exits, as another VM can delete it at any time * that it (the other VM) holds the file-lock. While the root node cannot * be deleted, it may not yet have been created, or the underlying * directory could have been deleted accidentally. */ private final File dir; /** * The file representing this preference node's preferences. * The file format is undocumented, and subject to change * from release to release, but I'm sure that you can figure * it out if you try real hard. */ private final File prefsFile; /** * A temporary file used for saving changes to preferences. 
As part of * the sync operation, changes are first saved into this file, and then * atomically renamed to prefsFile. This results in an atomic state * change from one valid set of preferences to another. The * the file-lock is held for the duration of this transformation. */ private final File tmpFile; /** * File, which keeps track of global modifications of userRoot. */ private static File userRootModFile; /** * Flag, which indicated whether userRoot was modified by another VM */ private static boolean isUserRootModified = false; /** * Keeps track of userRoot modification time. This time is reset to * zero after UNIX reboot, and is increased by 1 second each time * userRoot is modified. */ private static long userRootModTime; /* * File, which keeps track of global modifications of systemRoot */ private static File systemRootModFile; /* * Flag, which indicates whether systemRoot was modified by another VM */ private static boolean isSystemRootModified = false; /** * Keeps track of systemRoot modification time. This time is reset to * zero after system reboot, and is increased by 1 second each time * systemRoot is modified. */ private static long systemRootModTime; /** * Locally cached preferences for this node (includes uncommitted * changes). This map is initialized with from disk when the first get or * put operation occurs on this node. It is synchronized with the * corresponding disk file (prefsFile) by the sync operation. The initial * value is read *without* acquiring the file-lock. */ private Map<String, String> prefsCache = null; /** * The last modification time of the file backing this node at the time * that prefCache was last synchronized (or initially read). This * value is set *before* reading the file, so it's conservative; the * actual timestamp could be (slightly) higher. A value of zero indicates * that we were unable to initialize prefsCache from the disk, or * have not yet attempted to do so. 
(If prefsCache is non-null, it * indicates the former; if it's null, the latter.) */ private long lastSyncTime = 0; /** * Unix error code for locked file. */ private static final int EAGAIN = 11; /** * Unix error code for denied access. */ private static final int EACCES = 13; /* Used to interpret results of native functions */ private static final int LOCK_HANDLE = 0; private static final int ERROR_CODE = 1; /** * A list of all uncommitted preference changes. The elements in this * list are of type PrefChange. If this node is concurrently modified on * disk by another VM, the two sets of changes are merged when this node * is sync'ed by overwriting our prefsCache with the preference map last * written out to disk (by the other VM), and then replaying this change * log against that map. The resulting map is then written back * to the disk. */ final List<Change> changeLog = new ArrayList<>(); /** * Represents a change to a preference. */ private abstract class Change { /** * Reapplies the change to prefsCache. */ abstract void replay(); }; /** * Represents a preference put. */ private class Put extends Change { String key, value; Put(String key, String value) { this.key = key; this.value = value; } void replay() { prefsCache.put(key, value); } } /** * Represents a preference remove. */ private class Remove extends Change { String key; Remove(String key) { this.key = key; } void replay() { prefsCache.remove(key); } } /** * Represents the creation of this node. */ private class NodeCreate extends Change { /** * Performs no action, but the presence of this object in changeLog * will force the node and its ancestors to be made permanent at the * next sync. */ void replay() { } } /** * NodeCreate object for this node. */ NodeCreate nodeCreate = null; /** * Replay changeLog against prefsCache. 
*/ private void replayChanges() { for (int i = 0, n = changeLog.size(); i<n; i++) changeLog.get(i).replay(); } private static Timer syncTimer = new Timer(true); // Daemon Thread static { // Add periodic timer task to periodically sync cached prefs syncTimer.schedule(new TimerTask() { public void run() { syncWorld(); } }, SYNC_INTERVAL*1000, SYNC_INTERVAL*1000); // Add shutdown hook to flush cached prefs on normal termination AccessController.doPrivileged(new PrivilegedAction<Void>() { public Void run() { Runtime.getRuntime().addShutdownHook(new Thread() { public void run() { syncTimer.cancel(); syncWorld(); } }); return null; } }); } private static void syncWorld() { /* * Synchronization necessary because userRoot and systemRoot are * lazily initialized. */ Preferences userRt; Preferences systemRt; synchronized(FileSystemPreferences.class) { userRt = userRoot; systemRt = systemRoot; } try { if (userRt != null) userRt.flush(); } catch(BackingStoreException e) { getLogger().warning("Couldn't flush user prefs: " + e); } try { if (systemRt != null) systemRt.flush(); } catch(BackingStoreException e) { getLogger().warning("Couldn't flush system prefs: " + e); } } private final boolean isUserNode; /** * Special constructor for roots (both user and system). This constructor * will only be called twice, by the static initializer. */ private FileSystemPreferences(boolean user) { super(null, ""); isUserNode = user; dir = (user ? userRootDir: systemRootDir); prefsFile = new File(dir, "prefs.xml"); tmpFile = new File(dir, "prefs.tmp"); } /** * Construct a new FileSystemPreferences instance with the specified * parent node and name. This constructor, called from childSpi, * is used to make every node except for the two //roots. 
*/ private FileSystemPreferences(FileSystemPreferences parent, String name) { super(parent, name); isUserNode = parent.isUserNode; dir = new File(parent.dir, dirName(name)); prefsFile = new File(dir, "prefs.xml"); tmpFile = new File(dir, "prefs.tmp"); AccessController.doPrivileged(new PrivilegedAction<Void>() { public Void run() { newNode = !dir.exists(); return null; } }); if (newNode) { // These 2 things guarantee node will get wrtten at next flush/sync prefsCache = new TreeMap<>(); nodeCreate = new NodeCreate(); changeLog.add(nodeCreate); } } public boolean isUserNode() { return isUserNode; } protected void putSpi(String key, String value) { initCacheIfNecessary(); changeLog.add(new Put(key, value)); prefsCache.put(key, value); } protected String getSpi(String key) { initCacheIfNecessary(); return prefsCache.get(key); } protected void removeSpi(String key) { initCacheIfNecessary(); changeLog.add(new Remove(key)); prefsCache.remove(key); } /** * Initialize prefsCache if it has yet to be initialized. When this method * returns, prefsCache will be non-null. If the data was successfully * read from the file, lastSyncTime will be updated. If prefsCache was * null, but it was impossible to read the file (because it didn't * exist or for any other reason) prefsCache will be initialized to an * empty, modifiable Map, and lastSyncTime remain zero. */ private void initCacheIfNecessary() { if (prefsCache != null) return; try { loadCache(); } catch(Exception e) { // assert lastSyncTime == 0; prefsCache = new TreeMap<>(); } } /** * Attempt to load prefsCache from the backing store. If the attempt * succeeds, lastSyncTime will be updated (the new value will typically * correspond to the data loaded into the map, but it may be less, * if another VM is updating this node concurrently). If the attempt * fails, a BackingStoreException is thrown and both prefsCache and * lastSyncTime are unaffected by the call. 
*/ private void loadCache() throws BackingStoreException { try { AccessController.doPrivileged( new PrivilegedExceptionAction<Void>() { public Void run() throws BackingStoreException { Map<String, String> m = new TreeMap<>(); long newLastSyncTime = 0; try { newLastSyncTime = prefsFile.lastModified(); try (FileInputStream fis = new FileInputStream(prefsFile)) { XmlSupport.importMap(fis, m); } } catch(Exception e) { if (e instanceof InvalidPreferencesFormatException) { getLogger().warning("Invalid preferences format in " + prefsFile.getPath()); prefsFile.renameTo( new File( prefsFile.getParentFile(), "IncorrectFormatPrefs.xml")); m = new TreeMap<>(); } else if (e instanceof FileNotFoundException) { getLogger().warning("Prefs file removed in background " + prefsFile.getPath()); } else { throw new BackingStoreException(e); } } // Attempt succeeded; update state prefsCache = m; lastSyncTime = newLastSyncTime; return null; } }); } catch (PrivilegedActionException e) { throw (BackingStoreException) e.getException(); } } /** * Attempt to write back prefsCache to the backing store. If the attempt * succeeds, lastSyncTime will be updated (the new value will correspond * exactly to the data thust written back, as we hold the file lock, which * prevents a concurrent write. If the attempt fails, a * BackingStoreException is thrown and both the backing store (prefsFile) * and lastSyncTime will be unaffected by this call. This call will * NEVER leave prefsFile in a corrupt state. 
*/ private void writeBackCache() throws BackingStoreException { try { AccessController.doPrivileged( new PrivilegedExceptionAction<Void>() { public Void run() throws BackingStoreException { try { if (!dir.exists() && !dir.mkdirs()) throw new BackingStoreException(dir + " create failed."); try (FileOutputStream fos = new FileOutputStream(tmpFile)) { XmlSupport.exportMap(fos, prefsCache); } if (!tmpFile.renameTo(prefsFile)) throw new BackingStoreException("Can't rename " + tmpFile + " to " + prefsFile); } catch(Exception e) { if (e instanceof BackingStoreException) throw (BackingStoreException)e; throw new BackingStoreException(e); } return null; } }); } catch (PrivilegedActionException e) { throw (BackingStoreException) e.getException(); } } protected String[] keysSpi() { initCacheIfNecessary(); return prefsCache.keySet().toArray(new String[prefsCache.size()]); } protected String[] childrenNamesSpi() { return AccessController.doPrivileged( new PrivilegedAction<String[]>() { public String[] run() { List<String> result = new ArrayList<>(); File[] dirContents = dir.listFiles(); if (dirContents != null) { for (int i = 0; i < dirContents.length; i++) if (dirContents[i].isDirectory()) result.add(nodeName(dirContents[i].getName())); } return result.toArray(EMPTY_STRING_ARRAY); } }); } private static final String[] EMPTY_STRING_ARRAY = new String[0]; protected AbstractPreferences childSpi(String name) { return new FileSystemPreferences(this, name); } public void removeNode() throws BackingStoreException { synchronized (isUserNode()? userLockFile: systemLockFile) { // to remove a node we need an exclusive lock if (!lockFile(false)) throw(new BackingStoreException("Couldn't get file lock.")); try { super.removeNode(); } finally { unlockFile(); } } } /** * Called with file lock held (in addition to node locks). 
*/ protected void removeNodeSpi() throws BackingStoreException { try { AccessController.doPrivileged( new PrivilegedExceptionAction<Void>() { public Void run() throws BackingStoreException { if (changeLog.contains(nodeCreate)) { changeLog.remove(nodeCreate); nodeCreate = null; return null; } if (!dir.exists()) return null; prefsFile.delete(); tmpFile.delete(); // dir should be empty now. If it's not, empty it File[] junk = dir.listFiles(); if (junk.length != 0) { getLogger().warning( "Found extraneous files when removing node: " + Arrays.asList(junk)); for (int i=0; i<junk.length; i++) junk[i].delete(); } if (!dir.delete()) throw new BackingStoreException("Couldn't delete dir: " + dir); return null; } }); } catch (PrivilegedActionException e) { throw (BackingStoreException) e.getException(); } } public synchronized void sync() throws BackingStoreException { boolean userNode = isUserNode(); boolean shared; if (userNode) { shared = false; /* use exclusive lock for user prefs */ } else { /* if can write to system root, use exclusive lock. otherwise use shared lock. */ shared = !isSystemRootWritable; } synchronized (isUserNode()? 
userLockFile:systemLockFile) { if (!lockFile(shared)) throw(new BackingStoreException("Couldn't get file lock.")); final Long newModTime = AccessController.doPrivileged( new PrivilegedAction<Long>() { public Long run() { long nmt; if (isUserNode()) { nmt = userRootModFile.lastModified(); isUserRootModified = userRootModTime == nmt; } else { nmt = systemRootModFile.lastModified(); isSystemRootModified = systemRootModTime == nmt; } return new Long(nmt); } }); try { super.sync(); AccessController.doPrivileged(new PrivilegedAction<Void>() { public Void run() { if (isUserNode()) { userRootModTime = newModTime.longValue() + 1000; userRootModFile.setLastModified(userRootModTime); } else { systemRootModTime = newModTime.longValue() + 1000; systemRootModFile.setLastModified(systemRootModTime); } return null; } }); } finally { unlockFile(); } } } protected void syncSpi() throws BackingStoreException { try { AccessController.doPrivileged( new PrivilegedExceptionAction<Void>() { public Void run() throws BackingStoreException { syncSpiPrivileged(); return null; } }); } catch (PrivilegedActionException e) { throw (BackingStoreException) e.getException(); } } private void syncSpiPrivileged() throws BackingStoreException { if (isRemoved()) throw new IllegalStateException("Node has been removed"); if (prefsCache == null) return; // We've never been used, don't bother syncing long lastModifiedTime; if ((isUserNode() ? isUserRootModified : isSystemRootModified)) { lastModifiedTime = prefsFile.lastModified(); if (lastModifiedTime != lastSyncTime) { // Prefs at this node were externally modified; read in node and // playback any local mods since last sync loadCache(); replayChanges(); lastSyncTime = lastModifiedTime; } } else if (lastSyncTime != 0 && !dir.exists()) { // This node was removed in the background. Playback any changes // against a virgin (empty) Map. 
prefsCache = new TreeMap<>(); replayChanges(); } if (!changeLog.isEmpty()) { writeBackCache(); // Creates directory & file if necessary /* * Attempt succeeded; it's barely possible that the call to * lastModified might fail (i.e., return 0), but this would not * be a disaster, as lastSyncTime is allowed to lag. */ lastModifiedTime = prefsFile.lastModified(); /* If lastSyncTime did not change, or went back * increment by 1 second. Since we hold the lock * lastSyncTime always monotonically encreases in the * atomic sense. */ if (lastSyncTime <= lastModifiedTime) { lastSyncTime = lastModifiedTime + 1000; prefsFile.setLastModified(lastSyncTime); } changeLog.clear(); } } public void flush() throws BackingStoreException { if (isRemoved()) return; sync(); } protected void flushSpi() throws BackingStoreException { // assert false; } /** * Returns true if the specified character is appropriate for use in * Unix directory names. A character is appropriate if it's a printable * ASCII character (> 0x1f && < 0x7f) and unequal to slash ('/', 0x2f), * dot ('.', 0x2e), or underscore ('_', 0x5f). */ private static boolean isDirChar(char ch) { return ch > 0x1f && ch < 0x7f && ch != '/' && ch != '.' && ch != '_'; } /** * Returns the directory name corresponding to the specified node name. * Generally, this is just the node name. If the node name includes * inappropriate characters (as per isDirChar) it is translated to Base64. * with the underscore character ('_', 0x5f) prepended. */ private static String dirName(String nodeName) { for (int i=0, n=nodeName.length(); i < n; i++) if (!isDirChar(nodeName.charAt(i))) return "_" + Base64.byteArrayToAltBase64(byteArray(nodeName)); return nodeName; } /** * Translate a string into a byte array by translating each character * into two bytes, high-byte first ("big-endian"). 
*/ private static byte[] byteArray(String s) { int len = s.length(); byte[] result = new byte[2*len]; for (int i=0, j=0; i<len; i++) { char c = s.charAt(i); result[j++] = (byte) (c>>8); result[j++] = (byte) c; } return result; } /** * Returns the node name corresponding to the specified directory name. * (Inverts the transformation of dirName(String). */ private static String nodeName(String dirName) { if (dirName.charAt(0) != '_') return dirName; byte a[] = Base64.altBase64ToByteArray(dirName.substring(1)); StringBuffer result = new StringBuffer(a.length/2); for (int i = 0; i < a.length; ) { int highByte = a[i++] & 0xff; int lowByte = a[i++] & 0xff; result.append((char) ((highByte << 8) | lowByte)); } return result.toString(); } /** * Try to acquire the appropriate file lock (user or system). If * the initial attempt fails, several more attempts are made using * an exponential backoff strategy. If all attempts fail, this method * returns false. * @throws SecurityException if file access denied. */ private boolean lockFile(boolean shared) throws SecurityException{ boolean usernode = isUserNode(); int[] result; int errorCode = 0; File lockFile = (usernode ? userLockFile : systemLockFile); long sleepTime = INIT_SLEEP_TIME; for (int i = 0; i < MAX_ATTEMPTS; i++) { try { int perm = (usernode? USER_READ_WRITE: USER_RW_ALL_READ); result = lockFile0(lockFile.getCanonicalPath(), perm, shared); errorCode = result[ERROR_CODE]; if (result[LOCK_HANDLE] != 0) { if (usernode) { userRootLockHandle = result[LOCK_HANDLE]; } else { systemRootLockHandle = result[LOCK_HANDLE]; } return true; } } catch(IOException e) { // // If at first, you don't succeed... } try { Thread.sleep(sleepTime); } catch(InterruptedException e) { checkLockFile0ErrorCode(errorCode); return false; } sleepTime *= 2; } checkLockFile0ErrorCode(errorCode); return false; } /** * Checks if unlockFile0() returned an error. Throws a SecurityException, * if access denied. Logs a warning otherwise. 
*/ private void checkLockFile0ErrorCode (int errorCode) throws SecurityException { if (errorCode == EACCES) throw new SecurityException("Could not lock " + (isUserNode()? "User prefs." : "System prefs.") + " Lock file access denied."); if (errorCode != EAGAIN) getLogger().warning("Could not lock " + (isUserNode()? "User prefs. " : "System prefs.") + " Unix error code " + errorCode + "."); } /** * Locks file using UNIX file locking. * @param fileName Absolute file name of the lock file. * @return Returns a lock handle, used to unlock the file. */ private static native int[] lockFile0(String fileName, int permission, boolean shared); /** * Unlocks file previously locked by lockFile0(). * @param lockHandle Handle to the file lock. * @return Returns zero if OK, UNIX error code if failure. */ private static native int unlockFile0(int lockHandle); /** * Changes UNIX file permissions. */ private static native int chmod(String fileName, int permission); /** * Initial time between lock attempts, in ms. The time is doubled * after each failing attempt (except the first). */ private static int INIT_SLEEP_TIME = 50; /** * Maximum number of lock attempts. */ private static int MAX_ATTEMPTS = 5; /** * Release the the appropriate file lock (user or system). * @throws SecurityException if file access denied. */ private void unlockFile() { int result; boolean usernode = isUserNode(); File lockFile = (usernode ? userLockFile : systemLockFile); int lockHandle = ( usernode ? userRootLockHandle:systemRootLockHandle); if (lockHandle == 0) { getLogger().warning("Unlock: zero lockHandle for " + (usernode ? "user":"system") + " preferences.)"); return; } result = unlockFile0(lockHandle); if (result != 0) { getLogger().warning("Could not drop file-lock on " + (isUserNode() ? "user" : "system") + " preferences." + " Unix error code " + result + "."); if (result == EACCES) throw new SecurityException("Could not unlock" + (isUserNode()? "User prefs." 
: "System prefs.") + " Lock file access denied."); } if (isUserNode()) { userRootLockHandle = 0; } else { systemRootLockHandle = 0; } } }
apache/hive
37,304
ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/OpProcFactory.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.optimizer.lineage; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.Stack; import org.apache.hadoop.hive.metastore.Warehouse; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.ql.exec.ColumnInfo; import org.apache.hadoop.hive.ql.exec.FileSinkOperator; import org.apache.hadoop.hive.ql.exec.FilterOperator; import org.apache.hadoop.hive.ql.exec.ForwardOperator; import org.apache.hadoop.hive.ql.exec.GroupByOperator; import org.apache.hadoop.hive.ql.exec.JoinOperator; import org.apache.hadoop.hive.ql.exec.LateralViewJoinOperator; import org.apache.hadoop.hive.ql.exec.LimitOperator; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.PTFOperator; import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator; import org.apache.hadoop.hive.ql.exec.RowSchema; import org.apache.hadoop.hive.ql.exec.ScriptOperator; import org.apache.hadoop.hive.ql.exec.SelectOperator; import 
org.apache.hadoop.hive.ql.exec.TableScanOperator; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.hooks.LineageInfo; import org.apache.hadoop.hive.ql.hooks.LineageInfo.BaseColumnInfo; import org.apache.hadoop.hive.ql.hooks.LineageInfo.Dependency; import org.apache.hadoop.hive.ql.hooks.LineageInfo.DependencyType; import org.apache.hadoop.hive.ql.hooks.LineageInfo.Predicate; import org.apache.hadoop.hive.ql.hooks.LineageInfo.TableAliasInfo; import org.apache.hadoop.hive.ql.lib.Node; import org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor; import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; import org.apache.hadoop.hive.ql.lib.Utils; import org.apache.hadoop.hive.ql.metadata.VirtualColumn; import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec; import org.apache.hadoop.hive.ql.parse.ParseContext; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.plan.AggregationDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; import org.apache.hadoop.hive.ql.plan.FilterDesc; import org.apache.hadoop.hive.ql.plan.JoinCondDesc; import org.apache.hadoop.hive.ql.plan.JoinDesc; import org.apache.hadoop.hive.ql.plan.OperatorDesc; import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc; import org.apache.hadoop.hive.ql.plan.ptf.BoundaryDef; import org.apache.hadoop.hive.ql.plan.ptf.OrderExpressionDef; import org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef; import org.apache.hadoop.hive.ql.plan.ptf.PartitionedTableFunctionDef; import org.apache.hadoop.hive.ql.plan.ptf.WindowFrameDef; import org.apache.hadoop.hive.ql.plan.ptf.WindowFunctionDef; import org.apache.hadoop.hive.ql.plan.ptf.WindowTableFunctionDef; import org.apache.hadoop.hive.ql.udf.ptf.Noop; /** * Operator factory for the rule processors for lineage. 
*/ public class OpProcFactory { /** * Returns the parent operator in the walk path to the current operator. * * @param stack The stack encoding the path. * * @return Operator The parent operator in the current path. */ @SuppressWarnings("unchecked") protected static Operator<? extends OperatorDesc> getParent(Stack<Node> stack) { return (Operator<? extends OperatorDesc>)Utils.getNthAncestor(stack, 1); } /** * Processor for Script and UDTF Operators. */ public static class TransformLineage extends DefaultLineage implements SemanticNodeProcessor { @Override public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { // LineageCTx LineageCtx lCtx = (LineageCtx) procCtx; // The operators @SuppressWarnings("unchecked") Operator<? extends OperatorDesc> op = (Operator<? extends OperatorDesc>)nd; Operator<? extends OperatorDesc> inpOp = getParent(stack); lCtx.getIndex().copyPredicates(inpOp, op); // Create a single dependency list by concatenating the dependencies of all // the cols Dependency dep = new Dependency(); DependencyType newType = LineageInfo.DependencyType.SCRIPT; dep.setType(LineageInfo.DependencyType.SCRIPT); // TODO: Fix this to a non null value. 
dep.setExpr(null); LinkedHashSet<BaseColumnInfo> colSet = new LinkedHashSet<BaseColumnInfo>(); for(ColumnInfo ci : inpOp.getSchema().getSignature()) { Dependency d = lCtx.getIndex().getDependency(inpOp, ci); if (d != null) { newType = LineageCtx.getNewDependencyType(d.getType(), newType); if (!ci.isHiddenVirtualCol()) { colSet.addAll(d.getBaseCols()); } } } dep.setType(newType); dep.setBaseCols(colSet); boolean isScript = op instanceof ScriptOperator; // This dependency is then set for all the colinfos of the script operator for(ColumnInfo ci : op.getSchema().getSignature()) { Dependency d = dep; if (!isScript) { Dependency depCi = lCtx.getIndex().getDependency(inpOp, ci); if (depCi != null) { d = depCi; } } lCtx.getIndex().putDependency(op, ci, d); } return null; } } /** * Processor for TableScan Operator. This actually creates the base column mappings. */ public static class TableScanLineage extends DefaultLineage implements SemanticNodeProcessor { @Override public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { // LineageCtx LineageCtx lCtx = (LineageCtx) procCtx; ParseContext pctx = lCtx.getParseCtx(); // Table scan operator. 
TableScanOperator top = (TableScanOperator)nd; org.apache.hadoop.hive.ql.metadata.Table t = top.getConf().getTableMetadata(); Table tab = t.getTTable(); // Generate the mappings RowSchema rs = top.getSchema(); List<FieldSchema> cols = t.getAllCols(); Map<String, FieldSchema> fieldSchemaMap = new HashMap<String, FieldSchema>(); for(FieldSchema col : cols) { fieldSchemaMap.put(col.getName(), col); } Iterator<VirtualColumn> vcs = VirtualColumn.getRegistry().iterator(); while (vcs.hasNext()) { VirtualColumn vc = vcs.next(); fieldSchemaMap.put(vc.getName(), new FieldSchema(vc.getName(), vc.getTypeInfo().getTypeName(), "")); } TableAliasInfo tai = new TableAliasInfo(); tai.setAlias(top.getConf().getAlias()); tai.setTable(tab); for(ColumnInfo ci : rs.getSignature()) { // Create a dependency Dependency dep = new Dependency(); BaseColumnInfo bci = new BaseColumnInfo(); bci.setTabAlias(tai); bci.setColumn(fieldSchemaMap.get(ci.getInternalName())); // Populate the dependency dep.setType(LineageInfo.DependencyType.SIMPLE); dep.setBaseCols(new LinkedHashSet<BaseColumnInfo>()); dep.getBaseCols().add(bci); // Put the dependency in the map lCtx.getIndex().putDependency(top, ci, dep); } return null; } } /** * Processor for Join Operator. */ public static class JoinLineage extends DefaultLineage implements SemanticNodeProcessor { private final HashMap<Node, Object> outputMap = new HashMap<Node, Object>(); @Override public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { // Assert that there is at least one item in the stack. This should never // be called for leafs. 
assert(!stack.isEmpty()); // LineageCtx LineageCtx lCtx = (LineageCtx) procCtx; JoinOperator op = (JoinOperator)nd; JoinDesc jd = op.getConf(); // The input operator to the join is always a reduce sink operator ReduceSinkOperator inpOp = (ReduceSinkOperator)getParent(stack); lCtx.getIndex().copyPredicates(inpOp, op); Predicate cond = getPredicate(op, lCtx); if (cond != null) { lCtx.getIndex().addPredicate(op, cond); } ReduceSinkDesc rd = inpOp.getConf(); int tag = rd.getTag(); // Iterate over the outputs of the join operator and merge the // dependencies of the columns that corresponding to the tag. int cnt = 0; List<ExprNodeDesc> exprs = jd.getExprs().get((byte)tag); for(ColumnInfo ci : op.getSchema().getSignature()) { if (jd.getReversedExprs().get(ci.getInternalName()) != tag) { continue; } // Otherwise look up the expression corresponding to this ci ExprNodeDesc expr = exprs.get(cnt++); Dependency dependency = ExprProcFactory.getExprDependency(lCtx, inpOp, expr, outputMap); lCtx.getIndex().mergeDependency(op, ci, dependency); } return null; } private Predicate getPredicate(JoinOperator jop, LineageCtx lctx) { List<Operator<? extends OperatorDesc>> parentOperators = jop.getParentOperators(); JoinDesc jd = jop.getConf(); ExprNodeDesc [][] joinKeys = jd.getJoinKeys(); if (joinKeys == null || parentOperators == null || parentOperators.size() < 2) { return null; } LineageCtx.Index index = lctx.getIndex(); for (Operator<? 
extends OperatorDesc> op: parentOperators) { if (index.getDependencies(op) == null) { return null; } } Predicate cond = new Predicate(); JoinCondDesc[] conds = jd.getConds(); int parents = parentOperators.size(); StringBuilder sb = new StringBuilder("("); for (int i = 0; i < conds.length; i++) { if (i != 0) { sb.append(" AND "); } int left = conds[i].getLeft(); int right = conds[i].getRight(); if (joinKeys.length <= left || joinKeys[left].length == 0 || joinKeys.length <= right || joinKeys[right].length == 0 || parents < left || parents < right) { return null; } ExprNodeDesc expr = joinKeys[left][0]; Operator<? extends OperatorDesc> op = parentOperators.get(left); List<Operator<? extends OperatorDesc>> p = op.getParentOperators(); if (p == null || p.isEmpty()) { return null; } sb.append(ExprProcFactory.getExprString(op.getSchema(), expr, lctx, p.get(0), cond)); sb.append(" = "); expr = joinKeys[right][0]; op = parentOperators.get(right); p = op.getParentOperators(); if (p == null || p.isEmpty()) { return null; } sb.append(ExprProcFactory.getExprString(op.getSchema(), expr, lctx, p.get(0), cond)); } sb.append(")"); cond.setExpr(sb.toString()); return cond; } } /** * Processor for Join Operator. */ public static class LateralViewJoinLineage extends DefaultLineage implements SemanticNodeProcessor { @Override public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { // Assert that there is at least one item in the stack. This should never // be called for leafs. assert(!stack.isEmpty()); // LineageCtx LineageCtx lCtx = (LineageCtx) procCtx; LateralViewJoinOperator op = (LateralViewJoinOperator)nd; boolean isUdtfPath = true; Operator<? extends OperatorDesc> inpOp = getParent(stack); List<ColumnInfo> cols = inpOp.getSchema().getSignature(); lCtx.getIndex().copyPredicates(inpOp, op); if (inpOp instanceof SelectOperator) { isUdtfPath = false; } // Dirty hack!! 
// For the select path the columns are the ones at the beginning of the // current operators schema and for the udtf path the columns are // at the end of the operator schema. List<ColumnInfo> outCols = op.getSchema().getSignature(); int outColsSize = outCols.size(); int colsSize = cols.size(); int outColOffset = isUdtfPath ? outColsSize - colsSize : 0; for (int cnt = 0; cnt < colsSize; cnt++) { ColumnInfo outCol = outCols.get(outColOffset + cnt); if (!outCol.isHiddenVirtualCol()) { ColumnInfo col = cols.get(cnt); lCtx.getIndex().mergeDependency(op, outCol, lCtx.getIndex().getDependency(inpOp, col)); } } return null; } } /** * Processor for Select operator. */ public static class SelectLineage extends DefaultLineage implements SemanticNodeProcessor { private final HashMap<Node, Object> outputMap = new HashMap<Node, Object>(); @Override public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { LineageCtx lctx = (LineageCtx)procCtx; SelectOperator sop = (SelectOperator)nd; // if this is a selStarNoCompute then this select operator // is treated like a default operator, so just call the super classes // process method. if (sop.getConf().isSelStarNoCompute()) { return super.process(nd, stack, procCtx, nodeOutputs); } // Otherwise we treat this as a normal select operator and look at // the expressions. Operator<? extends OperatorDesc> inpOp = getParent(stack); lctx.getIndex().copyPredicates(inpOp, sop); RowSchema rs = sop.getSchema(); List<ColumnInfo> colInfos = rs.getSignature(); int cnt = 0; for(ExprNodeDesc expr : sop.getConf().getColList()) { Dependency dep = ExprProcFactory.getExprDependency(lctx, inpOp, expr, outputMap); if (dep != null && dep.getExpr() == null && (dep.getBaseCols().isEmpty() || dep.getType() != LineageInfo.DependencyType.SIMPLE)) { dep.setExpr(ExprProcFactory.getExprString(rs, expr, lctx, inpOp, null)); } lctx.getIndex().putDependency(sop, colInfos.get(cnt++), dep); } Operator<? 
extends OperatorDesc> op = null; if (!sop.getChildOperators().isEmpty()) { op = sop.getChildOperators().get(0); if (!op.getChildOperators().isEmpty() && op instanceof LimitOperator) { op = op.getChildOperators().get(0); } } if (op == null || (op.getChildOperators().isEmpty() && op instanceof FileSinkOperator)) { lctx.getIndex().addFinalSelectOp(sop, op); } return null; } } /** * Processor for GroupBy operator. */ public static class GroupByLineage extends DefaultLineage implements SemanticNodeProcessor { private final HashMap<Node, Object> outputMap = new HashMap<Node, Object>(); @Override public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { LineageCtx lctx = (LineageCtx)procCtx; GroupByOperator gop = (GroupByOperator)nd; List<ColumnInfo> colInfos = gop.getSchema().getSignature(); Operator<? extends OperatorDesc> inpOp = getParent(stack); lctx.getIndex().copyPredicates(inpOp, gop); int cnt = 0; for(ExprNodeDesc expr : gop.getConf().getKeys()) { lctx.getIndex().putDependency(gop, colInfos.get(cnt++), ExprProcFactory.getExprDependency(lctx, inpOp, expr, outputMap)); } // If this is a reduce side GroupBy operator, check if there is // a corresponding map side one. If so, some expression could have // already been resolved in the map side. 
boolean reduceSideGop = (inpOp instanceof ReduceSinkOperator)
        && (Utils.getNthAncestor(stack, 2) instanceof GroupByOperator);

      RowSchema rs = gop.getSchema();
      for (AggregationDesc agg : gop.getConf().getAggregators()) {
        // Concatenate the dependencies of all the parameters to
        // create the new dependency
        Dependency dep = new Dependency();
        DependencyType newType = LineageInfo.DependencyType.EXPRESSION;
        StringBuilder sb = new StringBuilder();
        boolean first = true;
        LinkedHashSet<BaseColumnInfo> bciSet = new LinkedHashSet<BaseColumnInfo>();
        for (ExprNodeDesc expr : agg.getParameters()) {
          if (first) {
            first = false;
          } else {
            sb.append(", ");
          }
          Dependency exprDep = ExprProcFactory.getExprDependency(lctx, inpOp, expr, outputMap);
          if (exprDep != null && !exprDep.getBaseCols().isEmpty()) {
            newType = LineageCtx.getNewDependencyType(exprDep.getType(), newType);
            bciSet.addAll(exprDep.getBaseCols());
            // Simple column reference: print it fully qualified when the
            // owning table is known.
            if (exprDep.getType() == LineageInfo.DependencyType.SIMPLE) {
              BaseColumnInfo col = exprDep.getBaseCols().iterator().next();
              Table t = col.getTabAlias().getTable();
              if (t != null) {
                sb.append(Warehouse.getQualifiedName(t)).append(".");
              }
              sb.append(col.getColumn().getName());
            }
          }
          // Non-simple (or unresolved) parameter: fall back to the printable
          // expression string.
          if (exprDep == null || exprDep.getBaseCols().isEmpty()
              || exprDep.getType() != LineageInfo.DependencyType.SIMPLE) {
            sb.append(exprDep != null && exprDep.getExpr() != null ? exprDep.getExpr()
              : ExprProcFactory.getExprString(rs, expr, lctx, inpOp, null));
          }
        }
        String expr = sb.toString();
        String udafName = agg.getGenericUDAFName();
        // On the reduce side the map-side GroupBy may already have wrapped the
        // expression in the UDAF name; avoid wrapping it twice.
        if (!(reduceSideGop && expr.startsWith(udafName))) {
          sb.setLength(0); // reset the buffer
          sb.append(udafName);
          sb.append("(");
          if (agg.getDistinct()) {
            sb.append("DISTINCT ");
          }
          sb.append(expr);
          if (first) {
            // No parameter, count(*)
            sb.append("*");
          }
          sb.append(")");
          expr = sb.toString();
        }
        dep.setExpr(expr);
        // If the bciSet is empty, this means that the inputs to this
        // aggregate function were all constants (e.g. count(1)). In this case
        // the aggregate function is just dependent on all the tables that are in
        // the dependency list of the input operator.
        if (bciSet.isEmpty()) {
          Set<TableAliasInfo> taiSet = new LinkedHashSet<TableAliasInfo>();
          if (inpOp.getSchema() != null && inpOp.getSchema().getSignature() != null ) {
            for (ColumnInfo ci : inpOp.getSchema().getSignature()) {
              Dependency inpDep = lctx.getIndex().getDependency(inpOp, ci);
              // The dependency can be null as some of the input cis may not have
              // been set in case of joins.
              if (inpDep != null) {
                for (BaseColumnInfo bci : inpDep.getBaseCols()) {
                  newType = LineageCtx.getNewDependencyType(inpDep.getType(), newType);
                  taiSet.add(bci.getTabAlias());
                }
              }
            }
          }

          // Create the BaseColumnInfos and set them in the bciSet
          for (TableAliasInfo tai : taiSet) {
            BaseColumnInfo bci = new BaseColumnInfo();
            bci.setTabAlias(tai);
            // This is set to null to reflect that the dependency is not on any
            // particular column of the table.
            bci.setColumn(null);
            bciSet.add(bci);
          }
        }
        dep.setBaseCols(bciSet);
        dep.setType(newType);
        lctx.getIndex().putDependency(gop, colInfos.get(cnt++), dep);
      }

      return null;
    }
  }

  /**
   * Union processor.
   * In this case we call mergeDependency as opposed to putDependency
   * in order to account for visits from different parents.
   */
  public static class UnionLineage extends DefaultLineage implements SemanticNodeProcessor {

    @SuppressWarnings("unchecked")
    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {
      // Assert that there is at least one item in the stack. This should never
      // be called for leafs.
      assert(!stack.isEmpty());

      // LineageCtx
      LineageCtx lCtx = (LineageCtx) procCtx;
      Operator<? extends OperatorDesc> op = (Operator<? extends OperatorDesc>)nd;

      // Get the row schema of the input operator.
      // The row schema of the parent operator
      Operator<?
extends OperatorDesc> inpOp = getParent(stack); lCtx.getIndex().copyPredicates(inpOp, op); RowSchema rs = op.getSchema(); List<ColumnInfo> inpCols = inpOp.getSchema().getSignature(); // check only for input cols for(ColumnInfo input : inpCols) { Dependency inpDep = lCtx.getIndex().getDependency(inpOp, input); if (inpDep != null) { //merge it with rs colInfo ColumnInfo ci = rs.getColumnInfo(input.getInternalName()); lCtx.getIndex().mergeDependency(op, ci, inpDep); } } return null; } } /** * ReduceSink processor. */ public static class ReduceSinkLineage implements SemanticNodeProcessor { private final HashMap<Node, Object> outputMap = new HashMap<Node, Object>(); @Override public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { // Assert that there is at least one item in the stack. This should never // be called for leafs. assert(!stack.isEmpty()); // LineageCtx LineageCtx lCtx = (LineageCtx) procCtx; ReduceSinkOperator rop = (ReduceSinkOperator)nd; Operator<? extends OperatorDesc> inpOp = getParent(stack); lCtx.getIndex().copyPredicates(inpOp, rop); int cnt = 0; // The keys are included only in case the reduce sink feeds into // a group by operator through a chain of forward operators Operator<? 
extends OperatorDesc> op = rop.getChildOperators().get(0);
      while (op instanceof ForwardOperator) {
        op = op.getChildOperators().get(0);
      }

      if (op instanceof GroupByOperator) {
        // Feeding a group-by: the RS schema lists keys first, then values,
        // positionally; record dependencies in that order.
        List<ColumnInfo> colInfos = rop.getSchema().getSignature();
        for (ExprNodeDesc expr : rop.getConf().getKeyCols()) {
          lCtx.getIndex().putDependency(rop, colInfos.get(cnt++),
              ExprProcFactory.getExprDependency(lCtx, inpOp, expr, outputMap));
        }
        for (ExprNodeDesc expr : rop.getConf().getValueCols()) {
          lCtx.getIndex().putDependency(rop, colInfos.get(cnt++),
              ExprProcFactory.getExprDependency(lCtx, inpOp, expr, outputMap));
        }
      } else {
        // Otherwise resolve columns by name under the KEY./VALUE. prefixes.
        RowSchema schema = rop.getSchema();
        ReduceSinkDesc desc = rop.getConf();
        List<ExprNodeDesc> keyCols = desc.getKeyCols();
        List<String> keyColNames = desc.getOutputKeyColumnNames();
        for (int i = 0; i < keyCols.size(); i++) {
          // order-bys, joins
          ColumnInfo column = schema.getColumnInfo(Utilities.ReduceField.KEY + "." + keyColNames.get(i));
          if (column == null) {
            continue; // key in values
          }
          lCtx.getIndex().putDependency(rop, column,
              ExprProcFactory.getExprDependency(lCtx, inpOp, keyCols.get(i), outputMap));
        }
        List<ExprNodeDesc> valCols = desc.getValueCols();
        List<String> valColNames = desc.getOutputValueColumnNames();
        for (int i = 0; i < valCols.size(); i++) {
          // todo: currently, bucketing,etc. makes RS differently with those for order-bys or joins
          ColumnInfo column = schema.getColumnInfo(valColNames.get(i));
          if (column == null) {
            // order-bys, joins
            column = schema.getColumnInfo(Utilities.ReduceField.VALUE + "." + valColNames.get(i));
          }
          lCtx.getIndex().putDependency(rop, column,
              ExprProcFactory.getExprDependency(lCtx, inpOp, valCols.get(i), outputMap));
        }
      }

      return null;
    }
  }

  /**
   * Filter processor.
   */
  public static class FilterLineage implements SemanticNodeProcessor {

    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {
      // Assert that there is at least one item in the stack. This should never
      // be called for leafs.
      assert(!stack.isEmpty());

      // LineageCtx
      LineageCtx lCtx = (LineageCtx) procCtx;
      FilterOperator fop = (FilterOperator)nd;

      // Get the row schema of the input operator.
      // The row schema of the parent operator
      Operator<? extends OperatorDesc> inpOp = getParent(stack);
      lCtx.getIndex().copyPredicates(inpOp, fop);
      FilterDesc filterDesc = fop.getConf();
      RowSchema rs = fop.getSchema();
      // Only user-written predicates are recorded; compiler-generated filters
      // are skipped.
      if (!filterDesc.isGenerated()) {
        Predicate cond = new Predicate();
        cond.setExpr(ExprProcFactory.getExprString(
            rs, filterDesc.getPredicate(), lCtx, inpOp, cond));
        lCtx.getIndex().addPredicate(fop, cond);
      }

      // A filter does not change the columns: forward the input dependencies
      // positionally.
      List<ColumnInfo> inpCols = inpOp.getSchema().getSignature();
      int cnt = 0;
      for (ColumnInfo ci : rs.getSignature()) {
        lCtx.getIndex().putDependency(fop, ci,
            lCtx.getIndex().getDependency(inpOp, inpCols.get(cnt++)));
      }
      return null;
    }
  }

  /**
   * PTF processor
   */
  public static class PTFLineage implements SemanticNodeProcessor {
    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {
      // LineageCtx
      LineageCtx lCtx = (LineageCtx) procCtx;
      // The operators
      @SuppressWarnings("unchecked")
      PTFOperator op = (PTFOperator)nd;
      Operator<?
extends OperatorDesc> inpOp = getParent(stack);
      lCtx.getIndex().copyPredicates(inpOp, op);
      Dependency dep = new Dependency();
      DependencyType newType = DependencyType.EXPRESSION;
      dep.setType(newType);
      // Internal names of all input columns referenced by the PTF invocation;
      // used below to select which base columns the output depends on.
      Set<String> columns = new HashSet<>();
      PartitionedTableFunctionDef funcDef = op.getConf().getFuncDef();
      // sb accumulates a printable reconstruction of the PTF/window invocation.
      StringBuilder sb = new StringBuilder();
      WindowFrameDef windowFrameDef = null;
      if (!(funcDef.getTFunction() instanceof Noop)) {
        if (funcDef instanceof WindowTableFunctionDef) {
          // function name
          WindowFunctionDef windowFunctionDef = ((WindowTableFunctionDef) funcDef).getWindowFunctions().getFirst();
          sb.append(windowFunctionDef.getName()).append("(");
          addArgs(sb, columns, lCtx, inpOp, op.getSchema(), windowFunctionDef.getArgs());
          windowFrameDef = windowFunctionDef.getWindowFrame();
          // addArgs leaves a trailing ", "; trim it before closing the parens.
          if (sb.charAt(sb.length() - 2) == ',') {
            sb.delete(sb.length() - 2, sb.length());
          }
          sb.append(")");
          sb.append(" over (");
        } else /* PartitionedTableFunctionDef */ {
          // function name
          sb.append(funcDef.getName()).append("(");
          addArgs(sb, columns, lCtx, inpOp, funcDef.getRawInputShape().getRr().getRowSchema(), funcDef.getArgs());
          // matchpath has argument pattern like matchpath(<input expression>, <argument methods: arg1(), arg2()...>)
          if (funcDef.getInput() != null) {
            sb.append("on ").append(funcDef.getInput().getAlias()).append(" ");
            int counter = 1;
            for (PTFExpressionDef arg : funcDef.getArgs()) {
              ExprNodeDesc exprNode = arg.getExprNode();
              addIfNotNull(columns, exprNode.getCols());
              sb.append("arg").append(counter++).append("(");
              sb.append(ExprProcFactory.getExprString(funcDef.getRawInputShape().getRr().getRowSchema(), arg.getExprNode(), lCtx, inpOp, null));
              sb.append("), ");
            }
            sb.delete(sb.length() - 2, sb.length());
          }
        }
      }
      /* Collect partition by and distribute by information. Please note, at the
         expression node level, there is no difference between those. That means
         distribute by gets a string partition by in the expression string. */
      if (funcDef.getPartition() != null ) {
        List<PTFExpressionDef> partitionExpressions = funcDef.getPartition().getExpressions();
        boolean isPartitionByAdded = false;
        for (PTFExpressionDef partitionExpr : partitionExpressions) {
          ExprNodeDesc partitionExprNode = partitionExpr.getExprNode();
          if (partitionExprNode.getCols() != null && !partitionExprNode.getCols().isEmpty()) {
            if (!isPartitionByAdded) {
              sb.append("partition by ");
              isPartitionByAdded = true;
            }
            addIfNotNull(columns, partitionExprNode.getCols());
            if (partitionExprNode instanceof ExprNodeColumnDesc) {
              sb.append(ExprProcFactory.getExprString(funcDef.getRawInputShape().getRr().getRowSchema(), partitionExprNode, lCtx, inpOp, null));
              sb.append(", ");
            }
            // NOTE(review): this delete runs even when the instanceof branch
            // above appended nothing, in which case it chops two characters of
            // earlier content — looks suspicious, confirm intended behavior.
            sb.delete(sb.length() - 2, sb.length());
          }
        }
      }
      /* Collects the order by and sort by information. Please note, at the
         expression node level, there is no difference between those. That means
         sort by gets a string partition by in the expression string. */
      if (funcDef.getOrder() != null) {
        /* Order by is sometimes added by the compiler to make the PTF call
           deterministic. At this point of the code execution, we don't know if
           it is added by the compiler or it was originally part of the query
           string. */
        List<OrderExpressionDef> orderExpressions = funcDef.getOrder().getExpressions();
        if (!sb.isEmpty() && sb.charAt(sb.length() - 1) != '(') {
          sb.append(" ");
        }
        sb.append("order by ");
        for (OrderExpressionDef orderExpr : orderExpressions) {
          ExprNodeDesc orderExprNode = orderExpr.getExprNode();
          addIfNotNull(columns, orderExprNode.getCols());
          sb.append(ExprProcFactory.getExprString(funcDef.getRawInputShape().getRr().getRowSchema(), orderExprNode, lCtx, inpOp, null));
          if (PTFInvocationSpec.Order.DESC.equals(orderExpr.getOrder())) {
            sb.append(" desc");
          }
          sb.append(", ");
        }
        sb.delete(sb.length() - 2, sb.length());
      }
      /* Window frame is sometimes added by the compiler to make the PTF call
         deterministic. At this point of the code execution, we don't know if it
         is added by the compiler or it was originally part of the query string. */
      if (windowFrameDef != null) {
        sb.append(" ").append(windowFrameDef.getWindowType()).append(" between ");
        appendBoundary(windowFrameDef.getStart(), sb, " preceding");
        sb.append(" and ");
        appendBoundary(windowFrameDef.getEnd(), sb, " following");
      }
      sb.append(")");
      dep.setExpr(sb.toString());
      // Collect the base columns of every referenced, non-hidden input column.
      LinkedHashSet<BaseColumnInfo> colSet = new LinkedHashSet<>();
      for (ColumnInfo ci : inpOp.getSchema().getSignature()) {
        Dependency d = lCtx.getIndex().getDependency(inpOp, ci);
        if (d != null) {
          newType = LineageCtx.getNewDependencyType(d.getType(), newType);
          if (!ci.isHiddenVirtualCol() && columns.contains(ci.getInternalName())) {
            colSet.addAll(d.getBaseCols());
          }
        }
      }
      dep.setType(newType);
      dep.setBaseCols(colSet);
      // This dependency is then set for all the colinfos of the script operator
      for (ColumnInfo ci : op.getSchema().getSignature()) {
        Dependency d = dep;
        // Pass-through columns keep their existing input dependency.
        Dependency depCi = lCtx.getIndex().getDependency(inpOp, ci);
        if (depCi != null) {
          d = depCi;
        }
        lCtx.getIndex().putDependency(op, ci, d);
      }
      return null;
    }

    // Appends one window-frame boundary: "current_row", "unbounded", or an
    // amount followed by " preceding"/" following".
    private static void appendBoundary(BoundaryDef boundary, StringBuilder sb, String boundaryText) {
      if (boundary.isCurrentRow()) {
        sb.append("current_row");
      } else {
        sb.append(boundary.isUnbounded() ? "unbounded" : boundary.getAmt() + boundaryText);
      }
    }

    /* Adds the PTF arguments for the lineage column list and also the
       expression string. Each printed argument is followed by ", "; the caller
       is responsible for trimming the trailing separator. */
    private void addArgs( StringBuilder sb, Set<String> columns, LineageCtx lCtx, Operator<?
extends OperatorDesc> inpOp, RowSchema rowSchema, List<PTFExpressionDef> args) {
      if (args == null || args.isEmpty()) {
        return;
      }
      for (PTFExpressionDef arg : args) {
        ExprNodeDesc argNode = arg.getExprNode();
        if (argNode.getCols() != null && !argNode.getCols().isEmpty()) {
          addIfNotNull(columns, argNode.getCols());
        }
        if (argNode instanceof ExprNodeConstantDesc) {
          // String constants are re-quoted in the printable expression.
          boolean isString = "string".equals(argNode.getTypeInfo().getTypeName());
          if (isString) {
            sb.append("'");
          }
          sb.append(((ExprNodeConstantDesc) argNode).getValue());
          if (isString) {
            sb.append("'");
          }
          sb.append(", ");
        } else if (argNode instanceof ExprNodeColumnDesc || argNode instanceof ExprNodeGenericFuncDesc) {
          ExprNodeDesc exprNode = arg.getExprNode();
          addIfNotNull(columns, exprNode.getCols());
          sb.append(ExprProcFactory.getExprString(rowSchema, exprNode, lCtx, inpOp, null));
          sb.append(", ");
        }
      }
    }

    // Adds every non-null item of the list to the set; tolerates a null or
    // empty list.
    private void addIfNotNull(Set<String> set, List<String> items) {
      if (items == null || items.isEmpty()) {
        return;
      }
      for (String item : items) {
        if (item != null) {
          set.add(item);
        }
      }
    }
  }

  /**
   * Default processor. This basically passes the input dependencies as such
   * to the output dependencies.
   */
  public static class DefaultLineage implements SemanticNodeProcessor {

    @SuppressWarnings("unchecked")
    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {
      // Assert that there is at least one item in the stack. This should never
      // be called for leafs.
      assert(!stack.isEmpty());

      // LineageCtx
      LineageCtx lCtx = (LineageCtx) procCtx;
      Operator<? extends OperatorDesc> op = (Operator<? extends OperatorDesc>)nd;

      // Get the row schema of the input operator.
      // The row schema of the parent operator
      Operator<? extends OperatorDesc> inpOp = getParent(stack);
      lCtx.getIndex().copyPredicates(inpOp, op);
      RowSchema rs = op.getSchema();
      List<ColumnInfo> inpCols = inpOp.getSchema().getSignature();
      // Forward the input dependencies positionally; assumes the output schema
      // is column-for-column aligned with the input schema.
      int cnt = 0;
      for (ColumnInfo ci : rs.getSignature()) {
        lCtx.getIndex().putDependency(op, ci,
            lCtx.getIndex().getDependency(inpOp, inpCols.get(cnt++)));
      }
      return null;
    }
  }

  // Factory methods returning a fresh processor instance per call.

  public static SemanticNodeProcessor getJoinProc() {
    return new JoinLineage();
  }

  public static SemanticNodeProcessor getLateralViewJoinProc() {
    return new LateralViewJoinLineage();
  }

  public static SemanticNodeProcessor getTSProc() {
    return new TableScanLineage();
  }

  public static SemanticNodeProcessor getTransformProc() {
    return new TransformLineage();
  }

  public static SemanticNodeProcessor getSelProc() {
    return new SelectLineage();
  }

  public static SemanticNodeProcessor getGroupByProc() {
    return new GroupByLineage();
  }

  public static SemanticNodeProcessor getUnionProc() {
    return new UnionLineage();
  }

  public static SemanticNodeProcessor getReduceSinkProc() {
    return new ReduceSinkLineage();
  }

  public static SemanticNodeProcessor getDefaultProc() {
    return new DefaultLineage();
  }

  public static SemanticNodeProcessor getFilterProc() {
    return new FilterLineage();
  }

  public static SemanticNodeProcessor getPTFProc() {
    return new PTFLineage();
  }
}
googleapis/google-cloud-java
37,130
java-aiplatform/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1beta1/MemoryBankServiceClientTest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.aiplatform.v1beta1; import static com.google.cloud.aiplatform.v1beta1.MemoryBankServiceClient.ListLocationsPagedResponse; import static com.google.cloud.aiplatform.v1beta1.MemoryBankServiceClient.ListMemoriesPagedResponse; import com.google.api.gax.core.NoCredentialsProvider; import com.google.api.gax.grpc.GaxGrpcProperties; import com.google.api.gax.grpc.testing.LocalChannelProvider; import com.google.api.gax.grpc.testing.MockGrpcService; import com.google.api.gax.grpc.testing.MockServiceHelper; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.InvalidArgumentException; import com.google.api.gax.rpc.StatusCode; import com.google.cloud.location.GetLocationRequest; import com.google.cloud.location.ListLocationsRequest; import com.google.cloud.location.ListLocationsResponse; import com.google.cloud.location.Location; import com.google.common.collect.Lists; import com.google.iam.v1.AuditConfig; import com.google.iam.v1.Binding; import com.google.iam.v1.GetIamPolicyRequest; import com.google.iam.v1.GetPolicyOptions; import com.google.iam.v1.Policy; import com.google.iam.v1.SetIamPolicyRequest; import com.google.iam.v1.TestIamPermissionsRequest; import com.google.iam.v1.TestIamPermissionsResponse; import com.google.longrunning.Operation; import com.google.protobuf.AbstractMessage; import com.google.protobuf.Any; import 
com.google.protobuf.ByteString; import com.google.protobuf.Empty; import com.google.protobuf.FieldMask; import com.google.protobuf.Timestamp; import io.grpc.StatusRuntimeException; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.UUID; import java.util.concurrent.ExecutionException; import javax.annotation.Generated; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; @Generated("by gapic-generator-java") public class MemoryBankServiceClientTest { private static MockIAMPolicy mockIAMPolicy; private static MockLocations mockLocations; private static MockMemoryBankService mockMemoryBankService; private static MockServiceHelper mockServiceHelper; private LocalChannelProvider channelProvider; private MemoryBankServiceClient client; @BeforeClass public static void startStaticServer() { mockMemoryBankService = new MockMemoryBankService(); mockLocations = new MockLocations(); mockIAMPolicy = new MockIAMPolicy(); mockServiceHelper = new MockServiceHelper( UUID.randomUUID().toString(), Arrays.<MockGrpcService>asList(mockMemoryBankService, mockLocations, mockIAMPolicy)); mockServiceHelper.start(); } @AfterClass public static void stopServer() { mockServiceHelper.stop(); } @Before public void setUp() throws IOException { mockServiceHelper.reset(); channelProvider = mockServiceHelper.createChannelProvider(); MemoryBankServiceSettings settings = MemoryBankServiceSettings.newBuilder() .setTransportChannelProvider(channelProvider) .setCredentialsProvider(NoCredentialsProvider.create()) .build(); client = MemoryBankServiceClient.create(settings); } @After public void tearDown() throws Exception { client.close(); } @Test public void createMemoryTest() throws Exception { Memory expectedResponse = Memory.newBuilder() .setName( MemoryName.of("[PROJECT]", "[LOCATION]", 
"[REASONING_ENGINE]", "[MEMORY]") .toString()) .setDisplayName("displayName1714148973") .setDescription("description-1724546052") .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .setFact("fact3135084") .putAllScope(new HashMap<String, String>()) .build(); Operation resultOperation = Operation.newBuilder() .setName("createMemoryTest") .setDone(true) .setResponse(Any.pack(expectedResponse)) .build(); mockMemoryBankService.addResponse(resultOperation); CreateMemoryRequest request = CreateMemoryRequest.newBuilder() .setParent( ReasoningEngineName.of("[PROJECT]", "[LOCATION]", "[REASONING_ENGINE]").toString()) .setMemory(Memory.newBuilder().build()) .build(); Memory actualResponse = client.createMemoryAsync(request).get(); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockMemoryBankService.getRequests(); Assert.assertEquals(1, actualRequests.size()); CreateMemoryRequest actualRequest = ((CreateMemoryRequest) actualRequests.get(0)); Assert.assertEquals(request.getParent(), actualRequest.getParent()); Assert.assertEquals(request.getMemory(), actualRequest.getMemory()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void createMemoryExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockMemoryBankService.addException(exception); try { CreateMemoryRequest request = CreateMemoryRequest.newBuilder() .setParent( ReasoningEngineName.of("[PROJECT]", "[LOCATION]", "[REASONING_ENGINE]") .toString()) .setMemory(Memory.newBuilder().build()) .build(); client.createMemoryAsync(request).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); InvalidArgumentException apiException = 
((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test public void getMemoryTest() throws Exception { Memory expectedResponse = Memory.newBuilder() .setName( MemoryName.of("[PROJECT]", "[LOCATION]", "[REASONING_ENGINE]", "[MEMORY]") .toString()) .setDisplayName("displayName1714148973") .setDescription("description-1724546052") .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .setFact("fact3135084") .putAllScope(new HashMap<String, String>()) .build(); mockMemoryBankService.addResponse(expectedResponse); MemoryName name = MemoryName.of("[PROJECT]", "[LOCATION]", "[REASONING_ENGINE]", "[MEMORY]"); Memory actualResponse = client.getMemory(name); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockMemoryBankService.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetMemoryRequest actualRequest = ((GetMemoryRequest) actualRequests.get(0)); Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getMemoryExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockMemoryBankService.addException(exception); try { MemoryName name = MemoryName.of("[PROJECT]", "[LOCATION]", "[REASONING_ENGINE]", "[MEMORY]"); client.getMemory(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void getMemoryTest2() throws Exception { Memory expectedResponse = Memory.newBuilder() .setName( MemoryName.of("[PROJECT]", "[LOCATION]", "[REASONING_ENGINE]", "[MEMORY]") .toString()) .setDisplayName("displayName1714148973") .setDescription("description-1724546052") .setCreateTime(Timestamp.newBuilder().build()) .setUpdateTime(Timestamp.newBuilder().build()) .setFact("fact3135084") .putAllScope(new HashMap<String, String>()) .build(); mockMemoryBankService.addResponse(expectedResponse); String name = "name3373707"; Memory actualResponse = client.getMemory(name); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockMemoryBankService.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetMemoryRequest actualRequest = ((GetMemoryRequest) actualRequests.get(0)); Assert.assertEquals(name, actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getMemoryExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockMemoryBankService.addException(exception); try { String name = "name3373707"; client.getMemory(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} // closes the catch block of the previous exception test (body is above this chunk)
} // closes the previous test method

  // ---------------------------------------------------------------------------
  // NOTE(review): auto-generated GAPIC unit tests (gRPC transport). Each happy-path
  // test primes an in-process mock service with a canned response (LRO methods are
  // wrapped in a pre-completed Operation), invokes the client, and verifies both the
  // unwrapped response and the exact request proto captured by the mock, plus the
  // x-goog-api-client header. Each *ExceptionTest primes INVALID_ARGUMENT and checks
  // the mapped InvalidArgumentException (via ExecutionException for async methods).
  // ---------------------------------------------------------------------------

  // updateMemoryAsync(Memory, FieldMask): LRO response unwrapping + request fields.
  @Test
  public void updateMemoryTest() throws Exception {
    Memory expectedResponse =
        Memory.newBuilder()
            .setName(
                MemoryName.of("[PROJECT]", "[LOCATION]", "[REASONING_ENGINE]", "[MEMORY]")
                    .toString())
            .setDisplayName("displayName1714148973")
            .setDescription("description-1724546052")
            .setCreateTime(Timestamp.newBuilder().build())
            .setUpdateTime(Timestamp.newBuilder().build())
            .setFact("fact3135084")
            .putAllScope(new HashMap<String, String>())
            .build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("updateMemoryTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockMemoryBankService.addResponse(resultOperation);

    Memory memory = Memory.newBuilder().build();
    FieldMask updateMask = FieldMask.newBuilder().build();

    Memory actualResponse = client.updateMemoryAsync(memory, updateMask).get();
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockMemoryBankService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    UpdateMemoryRequest actualRequest = ((UpdateMemoryRequest) actualRequests.get(0));

    Assert.assertEquals(memory, actualRequest.getMemory());
    Assert.assertEquals(updateMask, actualRequest.getUpdateMask());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  // updateMemoryAsync error path: cause of ExecutionException is InvalidArgumentException.
  @Test
  public void updateMemoryExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockMemoryBankService.addException(exception);

    try {
      Memory memory = Memory.newBuilder().build();
      FieldMask updateMask = FieldMask.newBuilder().build();
      client.updateMemoryAsync(memory, updateMask).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }

  // listMemories(ReasoningEngineName): single-page pagination over the mock response.
  @Test
  public void listMemoriesTest() throws Exception {
    Memory responsesElement = Memory.newBuilder().build();
    ListMemoriesResponse expectedResponse =
        ListMemoriesResponse.newBuilder()
            .setNextPageToken("")
            .addAllMemories(Arrays.asList(responsesElement))
            .build();
    mockMemoryBankService.addResponse(expectedResponse);

    ReasoningEngineName parent =
        ReasoningEngineName.of("[PROJECT]", "[LOCATION]", "[REASONING_ENGINE]");

    ListMemoriesPagedResponse pagedListResponse = client.listMemories(parent);

    List<Memory> resources = Lists.newArrayList(pagedListResponse.iterateAll());

    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getMemoriesList().get(0), resources.get(0));

    List<AbstractMessage> actualRequests = mockMemoryBankService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListMemoriesRequest actualRequest = ((ListMemoriesRequest) actualRequests.get(0));

    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void listMemoriesExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockMemoryBankService.addException(exception);

    try {
      ReasoningEngineName parent =
          ReasoningEngineName.of("[PROJECT]", "[LOCATION]", "[REASONING_ENGINE]");
      client.listMemories(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // listMemories(String): same flow via the plain-string parent overload.
  @Test
  public void listMemoriesTest2() throws Exception {
    Memory responsesElement = Memory.newBuilder().build();
    ListMemoriesResponse expectedResponse =
        ListMemoriesResponse.newBuilder()
            .setNextPageToken("")
            .addAllMemories(Arrays.asList(responsesElement))
            .build();
    mockMemoryBankService.addResponse(expectedResponse);

    String parent = "parent-995424086";

    ListMemoriesPagedResponse pagedListResponse = client.listMemories(parent);

    List<Memory> resources = Lists.newArrayList(pagedListResponse.iterateAll());

    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getMemoriesList().get(0), resources.get(0));

    List<AbstractMessage> actualRequests = mockMemoryBankService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListMemoriesRequest actualRequest = ((ListMemoriesRequest) actualRequests.get(0));

    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void listMemoriesExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockMemoryBankService.addException(exception);

    try {
      String parent = "parent-995424086";
      client.listMemories(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // deleteMemoryAsync(MemoryName): Empty LRO result; only the request name is checked.
  @Test
  public void deleteMemoryTest() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("deleteMemoryTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockMemoryBankService.addResponse(resultOperation);

    MemoryName name = MemoryName.of("[PROJECT]", "[LOCATION]", "[REASONING_ENGINE]", "[MEMORY]");

    client.deleteMemoryAsync(name).get();

    List<AbstractMessage> actualRequests = mockMemoryBankService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    DeleteMemoryRequest actualRequest = ((DeleteMemoryRequest) actualRequests.get(0));

    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void deleteMemoryExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockMemoryBankService.addException(exception);

    try {
      MemoryName name = MemoryName.of("[PROJECT]", "[LOCATION]", "[REASONING_ENGINE]", "[MEMORY]");
      client.deleteMemoryAsync(name).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }

  // deleteMemoryAsync(String) overload. NOTE(review): the generator reuses the
  // operation name "deleteMemoryTest" here; harmless, as the name is never asserted.
  @Test
  public void deleteMemoryTest2() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("deleteMemoryTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockMemoryBankService.addResponse(resultOperation);

    String name = "name3373707";

    client.deleteMemoryAsync(name).get();

    List<AbstractMessage> actualRequests = mockMemoryBankService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    DeleteMemoryRequest actualRequest = ((DeleteMemoryRequest) actualRequests.get(0));

    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void deleteMemoryExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockMemoryBankService.addException(exception);

    try {
      String name = "name3373707";
      client.deleteMemoryAsync(name).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }

  // generateMemoriesAsync(ReasoningEngineName): LRO returning GenerateMemoriesResponse.
  @Test
  public void generateMemoriesTest() throws Exception {
    GenerateMemoriesResponse expectedResponse =
        GenerateMemoriesResponse.newBuilder()
            .addAllGeneratedMemories(new ArrayList<GenerateMemoriesResponse.GeneratedMemory>())
            .build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("generateMemoriesTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockMemoryBankService.addResponse(resultOperation);

    ReasoningEngineName parent =
        ReasoningEngineName.of("[PROJECT]", "[LOCATION]", "[REASONING_ENGINE]");

    GenerateMemoriesResponse actualResponse = client.generateMemoriesAsync(parent).get();
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockMemoryBankService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GenerateMemoriesRequest actualRequest = ((GenerateMemoriesRequest) actualRequests.get(0));

    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void generateMemoriesExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockMemoryBankService.addException(exception);

    try {
      ReasoningEngineName parent =
          ReasoningEngineName.of("[PROJECT]", "[LOCATION]", "[REASONING_ENGINE]");
      client.generateMemoriesAsync(parent).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }

  // generateMemoriesAsync(String) overload.
  @Test
  public void generateMemoriesTest2() throws Exception {
    GenerateMemoriesResponse expectedResponse =
        GenerateMemoriesResponse.newBuilder()
            .addAllGeneratedMemories(new ArrayList<GenerateMemoriesResponse.GeneratedMemory>())
            .build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("generateMemoriesTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockMemoryBankService.addResponse(resultOperation);

    String parent = "parent-995424086";

    GenerateMemoriesResponse actualResponse = client.generateMemoriesAsync(parent).get();
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockMemoryBankService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GenerateMemoriesRequest actualRequest = ((GenerateMemoriesRequest) actualRequests.get(0));

    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void generateMemoriesExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockMemoryBankService.addException(exception);

    try {
      String parent = "parent-995424086";
      client.generateMemoriesAsync(parent).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }

  // retrieveMemories(ReasoningEngineName): unary call (not paged in this surface).
  @Test
  public void retrieveMemoriesTest() throws Exception {
    RetrieveMemoriesResponse expectedResponse =
        RetrieveMemoriesResponse.newBuilder()
            .addAllRetrievedMemories(new ArrayList<RetrieveMemoriesResponse.RetrievedMemory>())
            .setNextPageToken("nextPageToken-1386094857")
            .build();
    mockMemoryBankService.addResponse(expectedResponse);

    ReasoningEngineName parent =
        ReasoningEngineName.of("[PROJECT]", "[LOCATION]", "[REASONING_ENGINE]");

    RetrieveMemoriesResponse actualResponse = client.retrieveMemories(parent);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockMemoryBankService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    RetrieveMemoriesRequest actualRequest = ((RetrieveMemoriesRequest) actualRequests.get(0));

    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void retrieveMemoriesExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockMemoryBankService.addException(exception);

    try {
      ReasoningEngineName parent =
          ReasoningEngineName.of("[PROJECT]", "[LOCATION]", "[REASONING_ENGINE]");
      client.retrieveMemories(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // retrieveMemories(String) overload.
  @Test
  public void retrieveMemoriesTest2() throws Exception {
    RetrieveMemoriesResponse expectedResponse =
        RetrieveMemoriesResponse.newBuilder()
            .addAllRetrievedMemories(new ArrayList<RetrieveMemoriesResponse.RetrievedMemory>())
            .setNextPageToken("nextPageToken-1386094857")
            .build();
    mockMemoryBankService.addResponse(expectedResponse);

    String parent = "parent-995424086";

    RetrieveMemoriesResponse actualResponse = client.retrieveMemories(parent);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockMemoryBankService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    RetrieveMemoriesRequest actualRequest = ((RetrieveMemoriesRequest) actualRequests.get(0));

    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void retrieveMemoriesExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockMemoryBankService.addException(exception);

    try {
      String parent = "parent-995424086";
      client.retrieveMemories(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // Mixed-in Locations service: listLocations against its own mock (mockLocations).
  @Test
  public void listLocationsTest() throws Exception {
    Location responsesElement = Location.newBuilder().build();
    ListLocationsResponse expectedResponse =
        ListLocationsResponse.newBuilder()
            .setNextPageToken("")
            .addAllLocations(Arrays.asList(responsesElement))
            .build();
    mockLocations.addResponse(expectedResponse);

    ListLocationsRequest request =
        ListLocationsRequest.newBuilder()
            .setName("name3373707")
            .setFilter("filter-1274492040")
            .setPageSize(883849137)
            .setPageToken("pageToken873572522")
            .build();

    ListLocationsPagedResponse pagedListResponse = client.listLocations(request);

    List<Location> resources = Lists.newArrayList(pagedListResponse.iterateAll());

    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getLocationsList().get(0), resources.get(0));

    List<AbstractMessage> actualRequests = mockLocations.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListLocationsRequest actualRequest = ((ListLocationsRequest) actualRequests.get(0));

    Assert.assertEquals(request.getName(), actualRequest.getName());
    Assert.assertEquals(request.getFilter(), actualRequest.getFilter());
    Assert.assertEquals(request.getPageSize(), actualRequest.getPageSize());
    Assert.assertEquals(request.getPageToken(), actualRequest.getPageToken());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void listLocationsExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLocations.addException(exception);

    try {
      ListLocationsRequest request =
          ListLocationsRequest.newBuilder()
              .setName("name3373707")
              .setFilter("filter-1274492040")
              .setPageSize(883849137)
              .setPageToken("pageToken873572522")
              .build();
      client.listLocations(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void getLocationTest() throws Exception {
    Location expectedResponse =
        Location.newBuilder()
            .setName("name3373707")
            .setLocationId("locationId1541836720")
            .setDisplayName("displayName1714148973")
            .putAllLabels(new HashMap<String, String>())
            .setMetadata(Any.newBuilder().build())
            .build();
    mockLocations.addResponse(expectedResponse);

    GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();

    Location actualResponse = client.getLocation(request);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockLocations.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetLocationRequest actualRequest = ((GetLocationRequest) actualRequests.get(0));

    Assert.assertEquals(request.getName(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void getLocationExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLocations.addException(exception);

    try {
      GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();
      client.getLocation(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // Mixed-in IAM policy service against mockIAMPolicy.
  @Test
  public void setIamPolicyTest() throws Exception {
    Policy expectedResponse =
        Policy.newBuilder()
            .setVersion(351608024)
            .addAllBindings(new ArrayList<Binding>())
            .addAllAuditConfigs(new ArrayList<AuditConfig>())
            .setEtag(ByteString.EMPTY)
            .build();
    mockIAMPolicy.addResponse(expectedResponse);

    SetIamPolicyRequest request =
        SetIamPolicyRequest.newBuilder()
            .setResource(
                EndpointName.ofProjectLocationEndpointName("[PROJECT]", "[LOCATION]", "[ENDPOINT]")
                    .toString())
            .setPolicy(Policy.newBuilder().build())
            .setUpdateMask(FieldMask.newBuilder().build())
            .build();

    Policy actualResponse = client.setIamPolicy(request);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockIAMPolicy.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    SetIamPolicyRequest actualRequest = ((SetIamPolicyRequest) actualRequests.get(0));

    Assert.assertEquals(request.getResource(), actualRequest.getResource());
    Assert.assertEquals(request.getPolicy(), actualRequest.getPolicy());
    Assert.assertEquals(request.getUpdateMask(), actualRequest.getUpdateMask());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void setIamPolicyExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockIAMPolicy.addException(exception);

    try {
      SetIamPolicyRequest request =
          SetIamPolicyRequest.newBuilder()
              .setResource(
                  EndpointName.ofProjectLocationEndpointName(
                          "[PROJECT]", "[LOCATION]", "[ENDPOINT]")
                      .toString())
              .setPolicy(Policy.newBuilder().build())
              .setUpdateMask(FieldMask.newBuilder().build())
              .build();
      client.setIamPolicy(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void getIamPolicyTest() throws Exception {
    Policy expectedResponse =
        Policy.newBuilder()
            .setVersion(351608024)
            .addAllBindings(new ArrayList<Binding>())
            .addAllAuditConfigs(new ArrayList<AuditConfig>())
            .setEtag(ByteString.EMPTY)
            .build();
    mockIAMPolicy.addResponse(expectedResponse);

    GetIamPolicyRequest request =
        GetIamPolicyRequest.newBuilder()
            .setResource(
                EndpointName.ofProjectLocationEndpointName("[PROJECT]", "[LOCATION]", "[ENDPOINT]")
                    .toString())
            .setOptions(GetPolicyOptions.newBuilder().build())
            .build();

    Policy actualResponse = client.getIamPolicy(request);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockIAMPolicy.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetIamPolicyRequest actualRequest = ((GetIamPolicyRequest) actualRequests.get(0));

    Assert.assertEquals(request.getResource(), actualRequest.getResource());
    Assert.assertEquals(request.getOptions(), actualRequest.getOptions());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void getIamPolicyExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockIAMPolicy.addException(exception);

    try {
      GetIamPolicyRequest request =
          GetIamPolicyRequest.newBuilder()
              .setResource(
                  EndpointName.ofProjectLocationEndpointName(
                          "[PROJECT]", "[LOCATION]", "[ENDPOINT]")
                      .toString())
              .setOptions(GetPolicyOptions.newBuilder().build())
              .build();
      client.getIamPolicy(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void testIamPermissionsTest() throws Exception {
    TestIamPermissionsResponse expectedResponse =
        TestIamPermissionsResponse.newBuilder().addAllPermissions(new ArrayList<String>()).build();
    mockIAMPolicy.addResponse(expectedResponse);

    TestIamPermissionsRequest request =
        TestIamPermissionsRequest.newBuilder()
            .setResource(
                EndpointName.ofProjectLocationEndpointName("[PROJECT]", "[LOCATION]", "[ENDPOINT]")
                    .toString())
            .addAllPermissions(new ArrayList<String>())
            .build();

    TestIamPermissionsResponse actualResponse = client.testIamPermissions(request);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockIAMPolicy.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    TestIamPermissionsRequest actualRequest = ((TestIamPermissionsRequest) actualRequests.get(0));

    Assert.assertEquals(request.getResource(), actualRequest.getResource());
    Assert.assertEquals(request.getPermissionsList(), actualRequest.getPermissionsList());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void testIamPermissionsExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockIAMPolicy.addException(exception);

    try {
      TestIamPermissionsRequest request =
          TestIamPermissionsRequest.newBuilder()
              .setResource(
                  EndpointName.ofProjectLocationEndpointName(
                          "[PROJECT]", "[LOCATION]", "[ENDPOINT]")
                      .toString())
              .addAllPermissions(new ArrayList<String>())
              .build();
      client.testIamPermissions(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
} // end of generated test class
googleapis/google-cloud-java
37,156
java-gsuite-addons/proto-google-cloud-gsuite-addons-v1/src/main/java/com/google/cloud/gsuiteaddons/v1/ListDeploymentsResponse.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/gsuiteaddons/v1/gsuiteaddons.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.gsuiteaddons.v1;

/**
 * Response message to list deployments.
 *
 * <p>Protobuf type {@code google.cloud.gsuiteaddons.v1.ListDeploymentsResponse}
 */
public final class ListDeploymentsResponse extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.gsuiteaddons.v1.ListDeploymentsResponse)
    ListDeploymentsResponseOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ListDeploymentsResponse.newBuilder() to construct.
  private ListDeploymentsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor used for the default instance; fields get proto3 defaults.
  private ListDeploymentsResponse() {
    deployments_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListDeploymentsResponse();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.gsuiteaddons.v1.GSuiteAddOnsProto
        .internal_static_google_cloud_gsuiteaddons_v1_ListDeploymentsResponse_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.gsuiteaddons.v1.GSuiteAddOnsProto
        .internal_static_google_cloud_gsuiteaddons_v1_ListDeploymentsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse.class,
            com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse.Builder.class);
  }

  public static final int DEPLOYMENTS_FIELD_NUMBER = 1;

  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.gsuiteaddons.v1.Deployment> deployments_;

  /**
   * The list of deployments for the given project.
   *
   * <code>repeated .google.cloud.gsuiteaddons.v1.Deployment deployments = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.gsuiteaddons.v1.Deployment> getDeploymentsList() {
    return deployments_;
  }

  /**
   * The list of deployments for the given project.
   *
   * <code>repeated .google.cloud.gsuiteaddons.v1.Deployment deployments = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.gsuiteaddons.v1.DeploymentOrBuilder>
      getDeploymentsOrBuilderList() {
    return deployments_;
  }

  /**
   * The list of deployments for the given project.
   *
   * <code>repeated .google.cloud.gsuiteaddons.v1.Deployment deployments = 1;</code>
   */
  @java.lang.Override
  public int getDeploymentsCount() {
    return deployments_.size();
  }

  /**
   * The list of deployments for the given project.
   *
   * <code>repeated .google.cloud.gsuiteaddons.v1.Deployment deployments = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.gsuiteaddons.v1.Deployment getDeployments(int index) {
    return deployments_.get(index);
  }

  /**
   * The list of deployments for the given project.
   *
   * <code>repeated .google.cloud.gsuiteaddons.v1.Deployment deployments = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.gsuiteaddons.v1.DeploymentOrBuilder getDeploymentsOrBuilder(int index) {
    return deployments_.get(index);
  }

  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

  // Holds either a String or a ByteString; decoded lazily and cached (see getters below).
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";

  /**
   * A token, which can be sent as `page_token` to retrieve the next page.
   * If this field is omitted, there are no subsequent pages.
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so subsequent calls skip the UTF-8 decode.
      nextPageToken_ = s;
      return s;
    }
  }

  /**
   * A token, which can be sent as `page_token` to retrieve the next page.
   * If this field is omitted, there are no subsequent pages.
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded ByteString for later calls.
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // -1 = not computed yet, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < deployments_.size(); i++) {
      output.writeMessage(1, deployments_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    for (int i = 0; i < deployments_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, deployments_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse other =
        (com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse) obj;

    if (!getDeploymentsList().equals(other.getDeploymentsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getDeploymentsCount() > 0) {
      hash = (37 * hash) + DEPLOYMENTS_FIELD_NUMBER;
      hash = (53 * hash) + getDeploymentsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // --- Static parse helpers covering all standard protobuf input sources. ---

  public static com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * Response message to list deployments.
   *
   * <p>Protobuf type {@code google.cloud.gsuiteaddons.v1.ListDeploymentsResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.gsuiteaddons.v1.ListDeploymentsResponse)
      com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.gsuiteaddons.v1.GSuiteAddOnsProto
          .internal_static_google_cloud_gsuiteaddons_v1_ListDeploymentsResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.gsuiteaddons.v1.GSuiteAddOnsProto
          .internal_static_google_cloud_gsuiteaddons_v1_ListDeploymentsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse.class,
              com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse.Builder.class);
    }

    // Construct using com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (deploymentsBuilder_ == null) {
        deployments_ = java.util.Collections.emptyList();
      } else {
        deployments_ = null;
        deploymentsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.gsuiteaddons.v1.GSuiteAddOnsProto
          .internal_static_google_cloud_gsuiteaddons_v1_ListDeploymentsResponse_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse getDefaultInstanceForType() {
      return com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse build() {
      com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse buildPartial() {
      com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse result =
          new com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Moves the repeated deployments field into the result, freezing the list if
    // it was built without a field builder.
    private void buildPartialRepeatedFields(
        com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse result) {
      if (deploymentsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          deployments_ = java.util.Collections.unmodifiableList(deployments_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.deployments_ = deployments_;
      } else {
        result.deployments_ = deploymentsBuilder_.build();
      }
    }

    // Copies scalar fields whose presence bit is set.
    private void buildPartial0(com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse) {
        return mergeFrom((com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse other) {
      if (other == com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse.getDefaultInstance())
        return this;
      if (deploymentsBuilder_ == null) {
        if (!other.deployments_.isEmpty()) {
          if (deployments_.isEmpty()) {
            deployments_ = other.deployments_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureDeploymentsIsMutable();
            deployments_.addAll(other.deployments_);
          }
          onChanged();
        }
      } else {
        if (!other.deployments_.isEmpty()) {
          if (deploymentsBuilder_.isEmpty()) {
            deploymentsBuilder_.dispose();
            deploymentsBuilder_ = null;
            deployments_ = other.deployments_;
            bitField0_ = (bitField0_ & ~0x00000001);
            deploymentsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ?
getDeploymentsFieldBuilder() : null; } else { deploymentsBuilder_.addAllMessages(other.deployments_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.gsuiteaddons.v1.Deployment m = input.readMessage( com.google.cloud.gsuiteaddons.v1.Deployment.parser(), extensionRegistry); if (deploymentsBuilder_ == null) { ensureDeploymentsIsMutable(); deployments_.add(m); } else { deploymentsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.gsuiteaddons.v1.Deployment> deployments_ = java.util.Collections.emptyList(); private void ensureDeploymentsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { deployments_ = new java.util.ArrayList<com.google.cloud.gsuiteaddons.v1.Deployment>(deployments_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.gsuiteaddons.v1.Deployment, com.google.cloud.gsuiteaddons.v1.Deployment.Builder, 
com.google.cloud.gsuiteaddons.v1.DeploymentOrBuilder> deploymentsBuilder_; /** * * * <pre> * The list of deployments for the given project. * </pre> * * <code>repeated .google.cloud.gsuiteaddons.v1.Deployment deployments = 1;</code> */ public java.util.List<com.google.cloud.gsuiteaddons.v1.Deployment> getDeploymentsList() { if (deploymentsBuilder_ == null) { return java.util.Collections.unmodifiableList(deployments_); } else { return deploymentsBuilder_.getMessageList(); } } /** * * * <pre> * The list of deployments for the given project. * </pre> * * <code>repeated .google.cloud.gsuiteaddons.v1.Deployment deployments = 1;</code> */ public int getDeploymentsCount() { if (deploymentsBuilder_ == null) { return deployments_.size(); } else { return deploymentsBuilder_.getCount(); } } /** * * * <pre> * The list of deployments for the given project. * </pre> * * <code>repeated .google.cloud.gsuiteaddons.v1.Deployment deployments = 1;</code> */ public com.google.cloud.gsuiteaddons.v1.Deployment getDeployments(int index) { if (deploymentsBuilder_ == null) { return deployments_.get(index); } else { return deploymentsBuilder_.getMessage(index); } } /** * * * <pre> * The list of deployments for the given project. * </pre> * * <code>repeated .google.cloud.gsuiteaddons.v1.Deployment deployments = 1;</code> */ public Builder setDeployments(int index, com.google.cloud.gsuiteaddons.v1.Deployment value) { if (deploymentsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureDeploymentsIsMutable(); deployments_.set(index, value); onChanged(); } else { deploymentsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The list of deployments for the given project. 
* </pre> * * <code>repeated .google.cloud.gsuiteaddons.v1.Deployment deployments = 1;</code> */ public Builder setDeployments( int index, com.google.cloud.gsuiteaddons.v1.Deployment.Builder builderForValue) { if (deploymentsBuilder_ == null) { ensureDeploymentsIsMutable(); deployments_.set(index, builderForValue.build()); onChanged(); } else { deploymentsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of deployments for the given project. * </pre> * * <code>repeated .google.cloud.gsuiteaddons.v1.Deployment deployments = 1;</code> */ public Builder addDeployments(com.google.cloud.gsuiteaddons.v1.Deployment value) { if (deploymentsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureDeploymentsIsMutable(); deployments_.add(value); onChanged(); } else { deploymentsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The list of deployments for the given project. * </pre> * * <code>repeated .google.cloud.gsuiteaddons.v1.Deployment deployments = 1;</code> */ public Builder addDeployments(int index, com.google.cloud.gsuiteaddons.v1.Deployment value) { if (deploymentsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureDeploymentsIsMutable(); deployments_.add(index, value); onChanged(); } else { deploymentsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The list of deployments for the given project. * </pre> * * <code>repeated .google.cloud.gsuiteaddons.v1.Deployment deployments = 1;</code> */ public Builder addDeployments( com.google.cloud.gsuiteaddons.v1.Deployment.Builder builderForValue) { if (deploymentsBuilder_ == null) { ensureDeploymentsIsMutable(); deployments_.add(builderForValue.build()); onChanged(); } else { deploymentsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The list of deployments for the given project. 
* </pre> * * <code>repeated .google.cloud.gsuiteaddons.v1.Deployment deployments = 1;</code> */ public Builder addDeployments( int index, com.google.cloud.gsuiteaddons.v1.Deployment.Builder builderForValue) { if (deploymentsBuilder_ == null) { ensureDeploymentsIsMutable(); deployments_.add(index, builderForValue.build()); onChanged(); } else { deploymentsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of deployments for the given project. * </pre> * * <code>repeated .google.cloud.gsuiteaddons.v1.Deployment deployments = 1;</code> */ public Builder addAllDeployments( java.lang.Iterable<? extends com.google.cloud.gsuiteaddons.v1.Deployment> values) { if (deploymentsBuilder_ == null) { ensureDeploymentsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, deployments_); onChanged(); } else { deploymentsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The list of deployments for the given project. * </pre> * * <code>repeated .google.cloud.gsuiteaddons.v1.Deployment deployments = 1;</code> */ public Builder clearDeployments() { if (deploymentsBuilder_ == null) { deployments_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { deploymentsBuilder_.clear(); } return this; } /** * * * <pre> * The list of deployments for the given project. * </pre> * * <code>repeated .google.cloud.gsuiteaddons.v1.Deployment deployments = 1;</code> */ public Builder removeDeployments(int index) { if (deploymentsBuilder_ == null) { ensureDeploymentsIsMutable(); deployments_.remove(index); onChanged(); } else { deploymentsBuilder_.remove(index); } return this; } /** * * * <pre> * The list of deployments for the given project. 
* </pre> * * <code>repeated .google.cloud.gsuiteaddons.v1.Deployment deployments = 1;</code> */ public com.google.cloud.gsuiteaddons.v1.Deployment.Builder getDeploymentsBuilder(int index) { return getDeploymentsFieldBuilder().getBuilder(index); } /** * * * <pre> * The list of deployments for the given project. * </pre> * * <code>repeated .google.cloud.gsuiteaddons.v1.Deployment deployments = 1;</code> */ public com.google.cloud.gsuiteaddons.v1.DeploymentOrBuilder getDeploymentsOrBuilder(int index) { if (deploymentsBuilder_ == null) { return deployments_.get(index); } else { return deploymentsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The list of deployments for the given project. * </pre> * * <code>repeated .google.cloud.gsuiteaddons.v1.Deployment deployments = 1;</code> */ public java.util.List<? extends com.google.cloud.gsuiteaddons.v1.DeploymentOrBuilder> getDeploymentsOrBuilderList() { if (deploymentsBuilder_ != null) { return deploymentsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(deployments_); } } /** * * * <pre> * The list of deployments for the given project. * </pre> * * <code>repeated .google.cloud.gsuiteaddons.v1.Deployment deployments = 1;</code> */ public com.google.cloud.gsuiteaddons.v1.Deployment.Builder addDeploymentsBuilder() { return getDeploymentsFieldBuilder() .addBuilder(com.google.cloud.gsuiteaddons.v1.Deployment.getDefaultInstance()); } /** * * * <pre> * The list of deployments for the given project. * </pre> * * <code>repeated .google.cloud.gsuiteaddons.v1.Deployment deployments = 1;</code> */ public com.google.cloud.gsuiteaddons.v1.Deployment.Builder addDeploymentsBuilder(int index) { return getDeploymentsFieldBuilder() .addBuilder(index, com.google.cloud.gsuiteaddons.v1.Deployment.getDefaultInstance()); } /** * * * <pre> * The list of deployments for the given project. 
* </pre> * * <code>repeated .google.cloud.gsuiteaddons.v1.Deployment deployments = 1;</code> */ public java.util.List<com.google.cloud.gsuiteaddons.v1.Deployment.Builder> getDeploymentsBuilderList() { return getDeploymentsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.gsuiteaddons.v1.Deployment, com.google.cloud.gsuiteaddons.v1.Deployment.Builder, com.google.cloud.gsuiteaddons.v1.DeploymentOrBuilder> getDeploymentsFieldBuilder() { if (deploymentsBuilder_ == null) { deploymentsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.gsuiteaddons.v1.Deployment, com.google.cloud.gsuiteaddons.v1.Deployment.Builder, com.google.cloud.gsuiteaddons.v1.DeploymentOrBuilder>( deployments_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); deployments_ = null; } return deploymentsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.gsuiteaddons.v1.ListDeploymentsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.gsuiteaddons.v1.ListDeploymentsResponse) private static final com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse(); } public static com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListDeploymentsResponse> PARSER = new com.google.protobuf.AbstractParser<ListDeploymentsResponse>() { @java.lang.Override public ListDeploymentsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); 
} }; public static com.google.protobuf.Parser<ListDeploymentsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListDeploymentsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.gsuiteaddons.v1.ListDeploymentsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/flink
37,260
flink-runtime/src/test/java/org/apache/flink/runtime/operators/JoinTaskTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.operators; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.functions.FlatJoinFunction; import org.apache.flink.runtime.operators.testutils.DelayingInfinitiveInputIterator; import org.apache.flink.runtime.operators.testutils.DriverTestBase; import org.apache.flink.runtime.operators.testutils.ExpectedTestException; import org.apache.flink.runtime.operators.testutils.NirvanaOutputList; import org.apache.flink.runtime.operators.testutils.TaskCancelThread; import org.apache.flink.runtime.operators.testutils.UniformRecordGenerator; import org.apache.flink.runtime.testutils.recordutils.RecordComparator; import org.apache.flink.runtime.testutils.recordutils.RecordPairComparatorFactory; import org.apache.flink.types.IntValue; import org.apache.flink.types.Record; import org.apache.flink.types.Value; import org.apache.flink.util.Collector; import org.junit.jupiter.api.TestTemplate; import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import static org.assertj.core.api.Assertions.assertThat; import static 
org.assertj.core.api.Assertions.assertThatThrownBy; import static org.assertj.core.api.Assertions.fail; class JoinTaskTest extends DriverTestBase<FlatJoinFunction<Record, Record, Record>> { private static final long HASH_MEM = 6 * 1024 * 1024; private static final long SORT_MEM = 3 * 1024 * 1024; private static final int NUM_SORTER = 2; private static final long BNLJN_MEM = 10 * PAGE_SIZE; private final double bnljn_frac; private final double hash_frac; @SuppressWarnings("unchecked") private final RecordComparator comparator1 = new RecordComparator( new int[] {0}, (Class<? extends Value>[]) new Class<?>[] {IntValue.class}); @SuppressWarnings("unchecked") private final RecordComparator comparator2 = new RecordComparator( new int[] {0}, (Class<? extends Value>[]) new Class<?>[] {IntValue.class}); private final List<Record> outList = new ArrayList<>(); JoinTaskTest(ExecutionConfig config) { super(config, HASH_MEM, NUM_SORTER, SORT_MEM); bnljn_frac = (double) BNLJN_MEM / this.getMemoryManager().getMemorySize(); hash_frac = (double) HASH_MEM / this.getMemoryManager().getMemorySize(); } @TestTemplate void testSortBoth1MatchTask() { final int keyCnt1 = 20; final int valCnt1 = 1; final int keyCnt2 = 10; final int valCnt2 = 2; setOutput(this.outList); addDriverComparator(this.comparator1); addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get()); getTaskConfig().setDriverStrategy(DriverStrategy.INNER_MERGE); getTaskConfig().setRelativeMemoryDriver(bnljn_frac); setNumFileHandlesForSort(4); final JoinDriver<Record, Record, Record> testTask = new JoinDriver<>(); try { addInputSorted( new UniformRecordGenerator(keyCnt1, valCnt1, false), this.comparator1.duplicate()); addInputSorted( new UniformRecordGenerator(keyCnt2, valCnt2, false), this.comparator2.duplicate()); testDriver(testTask, MockMatchStub.class); } catch (Exception e) { e.printStackTrace(); fail("The test caused an exception."); } final int expCnt = valCnt1 * 
valCnt2 * Math.min(keyCnt1, keyCnt2); assertThat(this.outList) .withFailMessage("Resultset size was %d. Expected was %d", outList.size(), expCnt) .hasSize(expCnt); this.outList.clear(); } @TestTemplate void testSortBoth2MatchTask() { int keyCnt1 = 20; int valCnt1 = 1; int keyCnt2 = 20; int valCnt2 = 1; setOutput(this.outList); addDriverComparator(this.comparator1); addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get()); getTaskConfig().setDriverStrategy(DriverStrategy.INNER_MERGE); getTaskConfig().setRelativeMemoryDriver(bnljn_frac); setNumFileHandlesForSort(4); final JoinDriver<Record, Record, Record> testTask = new JoinDriver<>(); try { addInputSorted( new UniformRecordGenerator(keyCnt1, valCnt1, false), this.comparator1.duplicate()); addInputSorted( new UniformRecordGenerator(keyCnt2, valCnt2, false), this.comparator2.duplicate()); testDriver(testTask, MockMatchStub.class); } catch (Exception e) { e.printStackTrace(); fail("The test caused an exception."); } int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2); assertThat(this.outList) .withFailMessage("Resultset size was %d. 
Expected was %d", outList.size(), expCnt) .hasSize(expCnt); this.outList.clear(); } @TestTemplate void testSortBoth3MatchTask() { int keyCnt1 = 20; int valCnt1 = 1; int keyCnt2 = 20; int valCnt2 = 20; setOutput(this.outList); addDriverComparator(this.comparator1); addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get()); getTaskConfig().setDriverStrategy(DriverStrategy.INNER_MERGE); getTaskConfig().setRelativeMemoryDriver(bnljn_frac); setNumFileHandlesForSort(4); final JoinDriver<Record, Record, Record> testTask = new JoinDriver<>(); try { addInputSorted( new UniformRecordGenerator(keyCnt1, valCnt1, false), this.comparator1.duplicate()); addInputSorted( new UniformRecordGenerator(keyCnt2, valCnt2, false), this.comparator2.duplicate()); testDriver(testTask, MockMatchStub.class); } catch (Exception e) { e.printStackTrace(); fail("The test caused an exception."); } int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2); assertThat(this.outList) .withFailMessage("Resultset size was %d. 
Expected was %d", outList.size(), expCnt) .hasSize(expCnt); this.outList.clear(); } @TestTemplate void testSortBoth4MatchTask() { int keyCnt1 = 20; int valCnt1 = 20; int keyCnt2 = 20; int valCnt2 = 1; setOutput(this.outList); addDriverComparator(this.comparator1); addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get()); getTaskConfig().setDriverStrategy(DriverStrategy.INNER_MERGE); getTaskConfig().setRelativeMemoryDriver(bnljn_frac); setNumFileHandlesForSort(4); final JoinDriver<Record, Record, Record> testTask = new JoinDriver<>(); try { addInputSorted( new UniformRecordGenerator(keyCnt1, valCnt1, false), this.comparator1.duplicate()); addInputSorted( new UniformRecordGenerator(keyCnt2, valCnt2, false), this.comparator2.duplicate()); testDriver(testTask, MockMatchStub.class); } catch (Exception e) { e.printStackTrace(); fail("The test caused an exception."); } int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2); assertThat(this.outList) .withFailMessage("Resultset size was %d. 
Expected was %d", outList.size(), expCnt) .hasSize(expCnt); this.outList.clear(); } @TestTemplate void testSortBoth5MatchTask() { int keyCnt1 = 20; int valCnt1 = 20; int keyCnt2 = 20; int valCnt2 = 20; setOutput(this.outList); addDriverComparator(this.comparator1); addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get()); getTaskConfig().setDriverStrategy(DriverStrategy.INNER_MERGE); getTaskConfig().setRelativeMemoryDriver(bnljn_frac); setNumFileHandlesForSort(4); final JoinDriver<Record, Record, Record> testTask = new JoinDriver<>(); try { addInputSorted( new UniformRecordGenerator(keyCnt1, valCnt1, false), this.comparator1.duplicate()); addInputSorted( new UniformRecordGenerator(keyCnt2, valCnt2, false), this.comparator2.duplicate()); testDriver(testTask, MockMatchStub.class); } catch (Exception e) { e.printStackTrace(); fail("The test caused an exception."); } int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2); assertThat(this.outList) .withFailMessage("Resultset size was %d. 
Expected was %d", outList.size(), expCnt) .hasSize(expCnt); this.outList.clear(); } @TestTemplate void testSortFirstMatchTask() { int keyCnt1 = 20; int valCnt1 = 20; int keyCnt2 = 20; int valCnt2 = 20; setOutput(this.outList); addDriverComparator(this.comparator1); addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get()); getTaskConfig().setDriverStrategy(DriverStrategy.INNER_MERGE); getTaskConfig().setRelativeMemoryDriver(bnljn_frac); setNumFileHandlesForSort(4); final JoinDriver<Record, Record, Record> testTask = new JoinDriver<>(); try { addInputSorted( new UniformRecordGenerator(keyCnt1, valCnt1, false), this.comparator1.duplicate()); addInput(new UniformRecordGenerator(keyCnt2, valCnt2, true)); testDriver(testTask, MockMatchStub.class); } catch (Exception e) { e.printStackTrace(); fail("The test caused an exception."); } int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2); assertThat(this.outList) .withFailMessage("Resultset size was %d. 
Expected was %d", outList.size(), expCnt) .hasSize(expCnt); this.outList.clear(); } @TestTemplate void testSortSecondMatchTask() { int keyCnt1 = 20; int valCnt1 = 20; int keyCnt2 = 20; int valCnt2 = 20; setOutput(this.outList); addDriverComparator(this.comparator1); addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get()); getTaskConfig().setDriverStrategy(DriverStrategy.INNER_MERGE); getTaskConfig().setRelativeMemoryDriver(bnljn_frac); setNumFileHandlesForSort(4); final JoinDriver<Record, Record, Record> testTask = new JoinDriver<>(); try { addInput(new UniformRecordGenerator(keyCnt1, valCnt1, true)); addInputSorted( new UniformRecordGenerator(keyCnt2, valCnt2, false), this.comparator2.duplicate()); testDriver(testTask, MockMatchStub.class); } catch (Exception e) { e.printStackTrace(); fail("The test caused an exception."); } int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2); assertThat(this.outList) .withFailMessage("Resultset size was %d. 
Expected was %d", outList.size(), expCnt) .hasSize(expCnt); this.outList.clear(); } @TestTemplate void testMergeMatchTask() { int keyCnt1 = 20; int valCnt1 = 20; int keyCnt2 = 20; int valCnt2 = 20; setOutput(this.outList); addDriverComparator(this.comparator1); addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get()); getTaskConfig().setDriverStrategy(DriverStrategy.INNER_MERGE); getTaskConfig().setRelativeMemoryDriver(bnljn_frac); setNumFileHandlesForSort(4); final JoinDriver<Record, Record, Record> testTask = new JoinDriver<>(); addInput(new UniformRecordGenerator(keyCnt1, valCnt1, true)); addInput(new UniformRecordGenerator(keyCnt2, valCnt2, true)); try { testDriver(testTask, MockMatchStub.class); } catch (Exception e) { e.printStackTrace(); fail("The test caused an exception."); } int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2); assertThat(this.outList) .withFailMessage("Resultset size was %d. Expected was %d", outList.size(), expCnt) .hasSize(expCnt); this.outList.clear(); } @TestTemplate void testFailingMatchTask() { int keyCnt1 = 20; int valCnt1 = 20; int keyCnt2 = 20; int valCnt2 = 20; setOutput(new NirvanaOutputList()); addDriverComparator(this.comparator1); addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get()); getTaskConfig().setDriverStrategy(DriverStrategy.INNER_MERGE); getTaskConfig().setRelativeMemoryDriver(bnljn_frac); setNumFileHandlesForSort(4); final JoinDriver<Record, Record, Record> testTask = new JoinDriver<>(); addInput(new UniformRecordGenerator(keyCnt1, valCnt1, true)); addInput(new UniformRecordGenerator(keyCnt2, valCnt2, true)); assertThatThrownBy(() -> testDriver(testTask, MockFailingMatchStub.class)) .isInstanceOf(ExpectedTestException.class); } @TestTemplate void testCancelMatchTaskWhileSort1() { final int keyCnt = 20; final int valCnt = 20; try { setOutput(new NirvanaOutputList()); 
addDriverComparator(this.comparator1); addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get()); getTaskConfig().setDriverStrategy(DriverStrategy.INNER_MERGE); getTaskConfig().setRelativeMemoryDriver(bnljn_frac); setNumFileHandlesForSort(4); final JoinDriver<Record, Record, Record> testTask = new JoinDriver<>(); try { addInputSorted( new DelayingInfinitiveInputIterator(100), this.comparator1.duplicate()); addInput(new UniformRecordGenerator(keyCnt, valCnt, true)); } catch (Exception e) { e.printStackTrace(); fail("The test caused an exception."); } final AtomicReference<Throwable> error = new AtomicReference<>(); Thread taskRunner = new Thread("Task runner for testCancelMatchTaskWhileSort1()") { @Override public void run() { try { testDriver(testTask, MockMatchStub.class); } catch (Throwable t) { error.set(t); } } }; taskRunner.start(); Thread.sleep(1000); cancel(); taskRunner.interrupt(); taskRunner.join(60000); assertThat(taskRunner.isAlive()) .withFailMessage("Task thread did not finish within 60 seconds") .isFalse(); Throwable taskError = error.get(); assertThat(taskError) .withFailMessage("Error in task while canceling: %s", taskError) .isNull(); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @TestTemplate void testCancelMatchTaskWhileSort2() { final int keyCnt = 20; final int valCnt = 20; try { setOutput(new NirvanaOutputList()); addDriverComparator(this.comparator1); addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get()); getTaskConfig().setDriverStrategy(DriverStrategy.INNER_MERGE); getTaskConfig().setRelativeMemoryDriver(bnljn_frac); setNumFileHandlesForSort(4); final JoinDriver<Record, Record, Record> testTask = new JoinDriver<>(); try { addInput(new UniformRecordGenerator(keyCnt, valCnt, true)); addInputSorted( new DelayingInfinitiveInputIterator(100), this.comparator1.duplicate()); } catch (Exception e) { 
e.printStackTrace(); fail("The test caused an exception."); } final AtomicReference<Throwable> error = new AtomicReference<>(); Thread taskRunner = new Thread("Task runner for testCancelMatchTaskWhileSort2()") { @Override public void run() { try { testDriver(testTask, MockMatchStub.class); } catch (Throwable t) { error.set(t); } } }; taskRunner.start(); Thread.sleep(1000); cancel(); taskRunner.interrupt(); taskRunner.join(60000); assertThat(taskRunner.isAlive()) .withFailMessage("Task thread did not finish within 60 seconds") .isFalse(); Throwable taskError = error.get(); assertThat(taskError) .withFailMessage("Error in task while canceling: %s", taskError) .isNull(); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @TestTemplate void testCancelMatchTaskWhileMatching() { final int keyCnt = 20; final int valCnt = 20; try { setOutput(new NirvanaOutputList()); addDriverComparator(this.comparator1); addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get()); getTaskConfig().setDriverStrategy(DriverStrategy.INNER_MERGE); getTaskConfig().setRelativeMemoryDriver(bnljn_frac); setNumFileHandlesForSort(4); final JoinDriver<Record, Record, Record> testTask = new JoinDriver<>(); addInput(new UniformRecordGenerator(keyCnt, valCnt, true)); addInput(new UniformRecordGenerator(keyCnt, valCnt, true)); final AtomicReference<Throwable> error = new AtomicReference<>(); Thread taskRunner = new Thread("Task runner for testCancelMatchTaskWhileMatching()") { @Override public void run() { try { testDriver(testTask, MockDelayingMatchStub.class); } catch (Throwable t) { error.set(t); } } }; taskRunner.start(); Thread.sleep(1000); cancel(); taskRunner.interrupt(); taskRunner.join(60000); assertThat(taskRunner.isAlive()) .withFailMessage("Task thread did not finish within 60 seconds") .isFalse(); Throwable taskError = error.get(); assertThat(taskError) .withFailMessage("Error in task while canceling:\n%s", taskError) 
.isNull(); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @TestTemplate void testHash1MatchTask() { int keyCnt1 = 20; int valCnt1 = 1; int keyCnt2 = 10; int valCnt2 = 2; addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false)); addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false)); addDriverComparator(this.comparator1); addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get()); setOutput(this.outList); getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_FIRST); getTaskConfig().setRelativeMemoryDriver(hash_frac); JoinDriver<Record, Record, Record> testTask = new JoinDriver<>(); try { testDriver(testTask, MockMatchStub.class); } catch (Exception e) { e.printStackTrace(); fail("Test caused an exception."); } final int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2); assertThat(this.outList).hasSize(expCnt); this.outList.clear(); } @TestTemplate void testHash2MatchTask() { int keyCnt1 = 20; int valCnt1 = 1; int keyCnt2 = 20; int valCnt2 = 1; addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false)); addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false)); addDriverComparator(this.comparator1); addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get()); setOutput(this.outList); getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_SECOND); getTaskConfig().setRelativeMemoryDriver(hash_frac); JoinDriver<Record, Record, Record> testTask = new JoinDriver<>(); try { testDriver(testTask, MockMatchStub.class); } catch (Exception e) { e.printStackTrace(); fail("Test caused an exception."); } final int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2); assertThat(this.outList).withFailMessage("Wrong result set size.").hasSize(expCnt); this.outList.clear(); } @TestTemplate void testHash3MatchTask() { int keyCnt1 = 20; int valCnt1 = 1; int keyCnt2 = 20; int valCnt2 = 20; addInput(new 
UniformRecordGenerator(keyCnt1, valCnt1, false)); addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false)); addDriverComparator(this.comparator1); addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get()); setOutput(this.outList); getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_FIRST); getTaskConfig().setRelativeMemoryDriver(hash_frac); JoinDriver<Record, Record, Record> testTask = new JoinDriver<>(); try { testDriver(testTask, MockMatchStub.class); } catch (Exception e) { e.printStackTrace(); fail("Test caused an exception."); } final int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2); assertThat(this.outList).withFailMessage("Wrong result set size.").hasSize(expCnt); this.outList.clear(); } @TestTemplate void testHash4MatchTask() { int keyCnt1 = 20; int valCnt1 = 20; int keyCnt2 = 20; int valCnt2 = 1; addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false)); addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false)); addDriverComparator(this.comparator1); addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get()); setOutput(this.outList); getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_SECOND); getTaskConfig().setRelativeMemoryDriver(hash_frac); JoinDriver<Record, Record, Record> testTask = new JoinDriver<>(); try { testDriver(testTask, MockMatchStub.class); } catch (Exception e) { e.printStackTrace(); fail("Test caused an exception."); } final int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2); assertThat(this.outList).withFailMessage("Wrong result set size.").hasSize(expCnt); this.outList.clear(); } @TestTemplate void testHash5MatchTask() { int keyCnt1 = 20; int valCnt1 = 20; int keyCnt2 = 20; int valCnt2 = 20; addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false)); addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false)); addDriverComparator(this.comparator1); 
addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get()); setOutput(this.outList); getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_FIRST); getTaskConfig().setRelativeMemoryDriver(hash_frac); JoinDriver<Record, Record, Record> testTask = new JoinDriver<>(); try { testDriver(testTask, MockMatchStub.class); } catch (Exception e) { e.printStackTrace(); fail("Test caused an exception."); } final int expCnt = valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2); assertThat(this.outList).withFailMessage("Wrong result set size.").hasSize(expCnt); this.outList.clear(); } @TestTemplate void testFailingHashFirstMatchTask() { int keyCnt1 = 20; int valCnt1 = 20; int keyCnt2 = 20; int valCnt2 = 20; addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false)); addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false)); addDriverComparator(this.comparator1); addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get()); setOutput(new NirvanaOutputList()); getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_FIRST); getTaskConfig().setRelativeMemoryDriver(hash_frac); JoinDriver<Record, Record, Record> testTask = new JoinDriver<>(); assertThatThrownBy(() -> testDriver(testTask, MockFailingMatchStub.class)) .isInstanceOf(ExpectedTestException.class); } @TestTemplate void testFailingHashSecondMatchTask() { int keyCnt1 = 20; int valCnt1 = 20; int keyCnt2 = 20; int valCnt2 = 20; addInput(new UniformRecordGenerator(keyCnt1, valCnt1, false)); addInput(new UniformRecordGenerator(keyCnt2, valCnt2, false)); addDriverComparator(this.comparator1); addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get()); setOutput(new NirvanaOutputList()); getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_SECOND); getTaskConfig().setRelativeMemoryDriver(hash_frac); JoinDriver<Record, Record, Record> 
testTask = new JoinDriver<>(); assertThatThrownBy(() -> testDriver(testTask, MockFailingMatchStub.class)) .isInstanceOf(ExpectedTestException.class); } @TestTemplate void testCancelHashMatchTaskWhileBuildFirst() { final int keyCnt = 20; final int valCnt = 20; try { addInput(new DelayingInfinitiveInputIterator(100)); addInput(new UniformRecordGenerator(keyCnt, valCnt, false)); addDriverComparator(this.comparator1); addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get()); setOutput(new NirvanaOutputList()); getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_FIRST); getTaskConfig().setRelativeMemoryDriver(hash_frac); final JoinDriver<Record, Record, Record> testTask = new JoinDriver<>(); final AtomicBoolean success = new AtomicBoolean(false); Thread taskRunner = new Thread() { @Override public void run() { try { testDriver(testTask, MockMatchStub.class); success.set(true); } catch (Exception ie) { ie.printStackTrace(); } } }; taskRunner.start(); Thread.sleep(1000); cancel(); try { taskRunner.join(); } catch (InterruptedException ie) { fail("Joining threads failed"); } assertThat(success) .withFailMessage( "Test threw an exception even though it was properly canceled.") .isTrue(); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @TestTemplate void testHashCancelMatchTaskWhileBuildSecond() { final int keyCnt = 20; final int valCnt = 20; try { addInput(new UniformRecordGenerator(keyCnt, valCnt, false)); addInput(new DelayingInfinitiveInputIterator(100)); addDriverComparator(this.comparator1); addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get()); setOutput(new NirvanaOutputList()); getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_SECOND); getTaskConfig().setRelativeMemoryDriver(hash_frac); final JoinDriver<Record, Record, Record> testTask = new JoinDriver<>(); final AtomicBoolean success = new 
AtomicBoolean(false); Thread taskRunner = new Thread() { @Override public void run() { try { testDriver(testTask, MockMatchStub.class); success.set(true); } catch (Exception ie) { ie.printStackTrace(); } } }; taskRunner.start(); Thread.sleep(1000); cancel(); try { taskRunner.join(); } catch (InterruptedException ie) { fail("Joining threads failed"); } assertThat(success) .withFailMessage( "Test threw an exception even though it was properly canceled.") .isTrue(); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @TestTemplate void testHashFirstCancelMatchTaskWhileMatching() { int keyCnt = 20; int valCnt = 20; addInput(new UniformRecordGenerator(keyCnt, valCnt, false)); addInput(new UniformRecordGenerator(keyCnt, valCnt, false)); addDriverComparator(this.comparator1); addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get()); setOutput(new NirvanaOutputList()); getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_FIRST); getTaskConfig().setRelativeMemoryDriver(hash_frac); final JoinDriver<Record, Record, Record> testTask = new JoinDriver<>(); final AtomicBoolean success = new AtomicBoolean(false); Thread taskRunner = new Thread() { @Override public void run() { try { testDriver(testTask, MockMatchStub.class); success.set(true); } catch (Exception ie) { ie.printStackTrace(); } } }; taskRunner.start(); TaskCancelThread tct = new TaskCancelThread(1, taskRunner, this); tct.start(); try { tct.join(); taskRunner.join(); } catch (InterruptedException ie) { fail("Joining threads failed"); } assertThat(success) .withFailMessage("Test threw an exception even though it was properly canceled.") .isTrue(); } @TestTemplate void testHashSecondCancelMatchTaskWhileMatching() { int keyCnt = 20; int valCnt = 20; addInput(new UniformRecordGenerator(keyCnt, valCnt, false)); addInput(new UniformRecordGenerator(keyCnt, valCnt, false)); addDriverComparator(this.comparator1); 
addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(RecordPairComparatorFactory.get()); setOutput(new NirvanaOutputList()); getTaskConfig().setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_SECOND); getTaskConfig().setRelativeMemoryDriver(hash_frac); final JoinDriver<Record, Record, Record> testTask = new JoinDriver<>(); final AtomicBoolean success = new AtomicBoolean(false); Thread taskRunner = new Thread() { @Override public void run() { try { testDriver(testTask, MockMatchStub.class); success.set(true); } catch (Exception ie) { ie.printStackTrace(); } } }; taskRunner.start(); TaskCancelThread tct = new TaskCancelThread(1, taskRunner, this); tct.start(); try { tct.join(); taskRunner.join(); } catch (InterruptedException ie) { fail("Joining threads failed"); } assertThat(success) .withFailMessage("Test threw an exception even though it was properly canceled.") .isTrue(); } // ================================================================================================= public static final class MockMatchStub implements FlatJoinFunction<Record, Record, Record> { private static final long serialVersionUID = 1L; @Override public void join(Record record1, Record record2, Collector<Record> out) throws Exception { out.collect(record1); } } public static final class MockFailingMatchStub implements FlatJoinFunction<Record, Record, Record> { private static final long serialVersionUID = 1L; private int cnt = 0; @Override public void join(Record record1, Record record2, Collector<Record> out) throws Exception { if (++this.cnt >= 10) { throw new ExpectedTestException(); } out.collect(record1); } } public static final class MockDelayingMatchStub implements FlatJoinFunction<Record, Record, Record> { private static final long serialVersionUID = 1L; @Override public void join(Record record1, Record record2, Collector<Record> out) throws Exception { try { Thread.sleep(100); } catch (InterruptedException e) { } } } }
apache/pulsar
37,226
pulsar-broker/src/test/java/org/apache/pulsar/broker/admin/v1/V1AdminApi2Test.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.pulsar.broker.admin.v1; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertFalse; import static org.testng.Assert.assertNotNull; import static org.testng.Assert.assertNull; import static org.testng.Assert.assertTrue; import static org.testng.Assert.fail; import com.google.common.collect.Sets; import java.net.URL; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; import lombok.Cleanup; import org.apache.bookkeeper.mledger.impl.ManagedCursorImpl; import org.apache.bookkeeper.mledger.impl.ManagedLedgerImpl; import org.apache.pulsar.broker.admin.v1.V1AdminApiTest.MockedPulsarService; import org.apache.pulsar.broker.auth.MockedPulsarServiceBaseTest; import org.apache.pulsar.broker.loadbalance.impl.ModularLoadManagerImpl; import org.apache.pulsar.broker.loadbalance.impl.SimpleLoadManagerImpl; import org.apache.pulsar.broker.service.Topic; import org.apache.pulsar.broker.service.persistent.PersistentTopic; import org.apache.pulsar.client.admin.PulsarAdmin; import 
org.apache.pulsar.client.admin.PulsarAdminException; import org.apache.pulsar.client.admin.PulsarAdminException.PreconditionFailedException; import org.apache.pulsar.client.api.Consumer; import org.apache.pulsar.client.api.Message; import org.apache.pulsar.client.api.MessageRoutingMode; import org.apache.pulsar.client.api.Producer; import org.apache.pulsar.client.api.PulsarClient; import org.apache.pulsar.client.api.SubscriptionType; import org.apache.pulsar.client.impl.MessageIdImpl; import org.apache.pulsar.common.naming.TopicDomain; import org.apache.pulsar.common.naming.TopicName; import org.apache.pulsar.common.policies.data.ClusterData; import org.apache.pulsar.common.policies.data.ConsumerStats; import org.apache.pulsar.common.policies.data.FailureDomain; import org.apache.pulsar.common.policies.data.NonPersistentTopicStats; import org.apache.pulsar.common.policies.data.PartitionedTopicStats; import org.apache.pulsar.common.policies.data.PersistencePolicies; import org.apache.pulsar.common.policies.data.PersistentTopicInternalStats; import org.apache.pulsar.common.policies.data.RetentionPolicies; import org.apache.pulsar.common.policies.data.SubscriptionStats; import org.apache.pulsar.common.policies.data.TenantInfoImpl; import org.apache.pulsar.common.policies.data.TopicStats; import org.awaitility.Awaitility; import org.testng.annotations.AfterMethod; import org.testng.annotations.BeforeMethod; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; @Test(groups = "broker-admin") public class V1AdminApi2Test extends MockedPulsarServiceBaseTest { private MockedPulsarService mockPulsarSetup; @BeforeMethod @Override public void setup() throws Exception { conf.setTopicLevelPoliciesEnabled(false); conf.setSystemTopicEnabled(false); conf.setLoadBalancerEnabled(true); super.internalSetup(); // create other broker to test redirect on calls that need // namespace ownership mockPulsarSetup = new MockedPulsarService(this.conf); 
mockPulsarSetup.setup(); // Setup namespaces admin.clusters().createCluster("use", ClusterData.builder().serviceUrl(pulsar.getWebServiceAddress()).build()); TenantInfoImpl tenantInfo = new TenantInfoImpl(Set.of("role1", "role2"), Set.of("use")); admin.tenants().createTenant("prop-xyz", tenantInfo); admin.namespaces().createNamespace("prop-xyz/use/ns1"); } @AfterMethod(alwaysRun = true) @Override public void cleanup() throws Exception { super.internalCleanup(); mockPulsarSetup.cleanup(); } @DataProvider(name = "topicType") public Object[][] topicTypeProvider() { return new Object[][] { { TopicDomain.persistent.value() }, { TopicDomain.non_persistent.value() } }; } @DataProvider(name = "namespaceNames") public Object[][] namespaceNameProvider() { return new Object[][] { { "ns1" }, { "global" } }; } /** * <pre> * It verifies increasing partitions for partitioned-topic. * 1. create a partitioned-topic * 2. update partitions with larger number of partitions * 3. verify: getPartitionedMetadata and check number of partitions * 4. verify: this api creates existing subscription to new partitioned-topics * so, message will not be lost in new partitions * a. start producer and produce messages * b. 
check existing subscription for new topics and it should have backlog msgs * * </pre> * * @throws Exception */ @Test public void testIncrementPartitionsOfTopic() throws Exception { final String topicName = "increment-partitionedTopic"; final String subName1 = topicName + "-my-sub-1"; final String subName2 = topicName + "-my-sub-2"; final int startPartitions = 4; final int newPartitions = 8; final String partitionedTopicName = "persistent://prop-xyz/use/ns1/" + topicName; URL pulsarUrl = new URL(pulsar.getWebServiceAddress()); admin.topics().createPartitionedTopic(partitionedTopicName, startPartitions); // validate partition topic is created assertEquals(admin.topics().getPartitionedTopicMetadata(partitionedTopicName).partitions, startPartitions); // create consumer and subscriptions : check subscriptions @Cleanup PulsarClient client = PulsarClient.builder().serviceUrl(pulsarUrl.toString()).build(); Consumer<byte[]> consumer1 = client.newConsumer().topic(partitionedTopicName).subscriptionName(subName1) .subscriptionType(SubscriptionType.Shared).subscribe(); assertEquals(admin.topics().getSubscriptions(partitionedTopicName), List.of(subName1)); Consumer<byte[]> consumer2 = client.newConsumer().topic(partitionedTopicName).subscriptionName(subName2) .subscriptionType(SubscriptionType.Shared).subscribe(); assertEquals(new HashSet<>(admin.topics().getSubscriptions(partitionedTopicName)), Set.of(subName1, subName2)); // (1) update partitions admin.topics().updatePartitionedTopic(partitionedTopicName, newPartitions); // verify new partitions have been created assertEquals(admin.topics().getPartitionedTopicMetadata(partitionedTopicName).partitions, newPartitions); // (2) No Msg loss: verify new partitions have the same existing subscription names final String newPartitionTopicName = TopicName.get(partitionedTopicName).getPartition(startPartitions + 1) .toString(); // (3) produce messages to all partitions including newly created partitions (RoundRobin) Producer<byte[]> 
producer = client.newProducer().topic(partitionedTopicName) .enableBatching(false) .messageRoutingMode(MessageRoutingMode.RoundRobinPartition) .create(); final int totalMessages = newPartitions * 2; for (int i = 0; i < totalMessages; i++) { String message = "message-" + i; producer.send(message.getBytes()); } // (4) verify existing subscription has not lost any message: create new consumer with sub-2: it will load all // newly created partition topics consumer2.close(); consumer2 = client.newConsumer().topic(partitionedTopicName).subscriptionName(subName2) .subscriptionType(SubscriptionType.Shared).subscribe(); assertEquals(new HashSet<>(admin.topics().getSubscriptions(newPartitionTopicName)), Set.of(subName1, subName2)); assertEquals(new HashSet<>(admin.topics().getList("prop-xyz/use/ns1")).size(), newPartitions); // test cumulative stats for partitioned topic PartitionedTopicStats topicStats = admin.topics().getPartitionedStats(partitionedTopicName, false); assertEquals(topicStats.getSubscriptions().keySet(), Set.of(subName1, subName2)); assertEquals(topicStats.getSubscriptions().get(subName2).getConsumers().size(), 1); assertEquals(topicStats.getSubscriptions().get(subName2).getMsgBacklog(), totalMessages); assertEquals(topicStats.getPublishers().size(), 1); assertEquals(topicStats.getPartitions(), new HashMap<>()); // (5) verify: each partition should have backlog topicStats = admin.topics().getPartitionedStats(partitionedTopicName, true); assertEquals(topicStats.getMetadata().partitions, newPartitions); Set<String> partitionSet = new HashSet<>(); for (int i = 0; i < newPartitions; i++) { partitionSet.add(partitionedTopicName + "-partition-" + i); } assertEquals(topicStats.getPartitions().keySet(), partitionSet); for (int i = 0; i < newPartitions; i++) { TopicStats partitionStats = topicStats.getPartitions() .get(TopicName.get(partitionedTopicName).getPartition(i).toString()); assertEquals(partitionStats.getPublishers().size(), 1); 
assertEquals(partitionStats.getSubscriptions().get(subName2).getConsumers().size(), 1); assertEquals(partitionStats.getSubscriptions().get(subName2).getMsgBacklog(), 2, 1); } producer.close(); consumer1.close(); consumer2.close(); consumer2.close(); } /** * verifies admin api command for non-persistent topic. It verifies: partitioned-topic, stats * * @throws Exception */ @Test public void nonPersistentTopics() throws Exception { final String topicName = "nonPersistentTopic"; final String persistentTopicName = "non-persistent://prop-xyz/use/ns1/" + topicName; // Force to create a topic publishMessagesOnTopic("non-persistent://prop-xyz/use/ns1/" + topicName, 0, 0); // create consumer and subscription @Cleanup PulsarClient client = PulsarClient.builder() .serviceUrl(pulsar.getWebServiceAddress()) .statsInterval(0, TimeUnit.SECONDS) .build(); Consumer<byte[]> consumer = client.newConsumer().topic(persistentTopicName).subscriptionName("my-sub") .subscribe(); publishMessagesOnTopic("non-persistent://prop-xyz/use/ns1/" + topicName, 10, 0); NonPersistentTopicStats topicStats = admin.nonPersistentTopics().getStats(persistentTopicName); assertEquals(topicStats.getSubscriptions().keySet(), Set.of("my-sub")); assertEquals(topicStats.getSubscriptions().get("my-sub").getConsumers().size(), 1); assertEquals(topicStats.getPublishers().size(), 0); PersistentTopicInternalStats internalStats = admin.nonPersistentTopics().getInternalStats(persistentTopicName); assertEquals(internalStats.cursors.keySet(), Set.of("my-sub")); consumer.close(); client.close(); topicStats = admin.nonPersistentTopics().getStats(persistentTopicName); assertFalse(topicStats.getSubscriptions().keySet().contains("my-sub")); assertEquals(topicStats.getPublishers().size(), 0); // test partitioned-topic final String partitionedTopicName = "non-persistent://prop-xyz/use/ns1/paritioned"; try { admin.nonPersistentTopics().getPartitionedTopicMetadata(partitionedTopicName); fail("Should have failed"); } catch 
(Exception ex) { assertTrue(ex instanceof PulsarAdminException.NotFoundException); } admin.nonPersistentTopics().createPartitionedTopic(partitionedTopicName, 5); assertEquals(admin.nonPersistentTopics().getPartitionedTopicMetadata(partitionedTopicName).partitions, 5); } private void publishMessagesOnTopic(String topicName, int messages, int startIdx) throws Exception { Producer<byte[]> producer = pulsarClient.newProducer() .topic(topicName) .enableBatching(false) .messageRoutingMode(MessageRoutingMode.SinglePartition) .create(); for (int i = startIdx; i < (messages + startIdx); i++) { String message = "message-" + i; producer.send(message.getBytes()); } producer.close(); } /** * verifies validation on persistent-policies. * * @throws Exception */ @Test public void testSetPersistencepolicies() throws Exception { final String namespace = "prop-xyz/use/ns2"; admin.namespaces().createNamespace(namespace); assertEquals(admin.namespaces().getPersistence(namespace), null); admin.namespaces().setPersistence(namespace, new PersistencePolicies(3, 3, 3, 10.0)); assertEquals(admin.namespaces().getPersistence(namespace), new PersistencePolicies(3, 3, 3, 10.0)); try { admin.namespaces().setPersistence(namespace, new PersistencePolicies(3, 4, 3, 10.0)); fail("should have failed"); } catch (PulsarAdminException e) { assertEquals(e.getStatusCode(), 400); } try { admin.namespaces().setPersistence(namespace, new PersistencePolicies(3, 3, 4, 10.0)); fail("should have failed"); } catch (PulsarAdminException e) { assertEquals(e.getStatusCode(), 400); } try { admin.namespaces().setPersistence(namespace, new PersistencePolicies(6, 3, 1, 10.0)); fail("should have failed"); } catch (PulsarAdminException e) { assertEquals(e.getStatusCode(), 400); } // make sure policies has not been changed assertEquals(admin.namespaces().getPersistence(namespace), new PersistencePolicies(3, 3, 3, 10.0)); } /** * validates update of persistent-policies reflects on managed-ledger and managed-cursor. 
* * @throws Exception */ @Test public void testUpdatePersistencePolicyUpdateManagedCursor() throws Exception { final String namespace = "prop-xyz/use/ns2"; final String topicName = "persistent://" + namespace + "/topic1"; admin.namespaces().createNamespace(namespace); admin.namespaces().setPersistence(namespace, new PersistencePolicies(3, 3, 3, 50.0)); assertEquals(admin.namespaces().getPersistence(namespace), new PersistencePolicies(3, 3, 3, 50.0)); Producer<byte[]> producer = pulsarClient.newProducer() .topic(topicName) .enableBatching(false) .messageRoutingMode(MessageRoutingMode.SinglePartition) .create(); Consumer<byte[]> consumer = pulsarClient.newConsumer().topic(topicName).subscriptionName("my-sub").subscribe(); PersistentTopic topic = (PersistentTopic) pulsar.getBrokerService().getOrCreateTopic(topicName).get(); ManagedLedgerImpl managedLedger = (ManagedLedgerImpl) topic.getManagedLedger(); ManagedCursorImpl cursor = (ManagedCursorImpl) managedLedger.getCursors().iterator().next(); final double newThrottleRate = 100; final int newEnsembleSize = 5; admin.namespaces().setPersistence(namespace, new PersistencePolicies(newEnsembleSize, 3, 3, newThrottleRate)); retryStrategically((test) -> managedLedger.getConfig().getEnsembleSize() == newEnsembleSize && cursor.getThrottleMarkDelete() != newThrottleRate, 5, 200); // (1) verify cursor.markDelete has been updated assertEquals(cursor.getThrottleMarkDelete(), newThrottleRate); // (2) verify new ledger creation takes new config producer.close(); consumer.close(); } /** * Verify unloading topic. 
* * @throws Exception */ @Test(dataProvider = "topicType") public void testUnloadTopic(final String topicType) throws Exception { final String namespace = "prop-xyz/use/ns2"; final String topicName = topicType + "://" + namespace + "/topic1"; admin.namespaces().createNamespace(namespace); // create a topic by creating a producer Producer<byte[]> producer = pulsarClient.newProducer().topic(topicName).create(); producer.close(); Topic topic = pulsar.getBrokerService().getTopicIfExists(topicName).join().get(); final boolean isPersistentTopic = topic instanceof PersistentTopic; // (1) unload the topic unloadTopic(topicName, isPersistentTopic); // topic must be removed from map assertFalse(pulsar.getBrokerService().getTopicReference(topicName).isPresent()); // recreation of producer will load the topic again producer = pulsarClient.newProducer().topic(topicName).create(); topic = pulsar.getBrokerService().getTopicReference(topicName).get(); assertNotNull(topic); // unload the topic unloadTopic(topicName, isPersistentTopic); // producer will retry and recreate the topic Awaitility.await().until(() -> pulsar.getBrokerService().getTopicReference(topicName).isPresent()); // topic should be loaded by this time topic = pulsar.getBrokerService().getTopicReference(topicName).get(); assertNotNull(topic); } private void unloadTopic(String topicName, boolean isPersistentTopic) throws Exception { if (isPersistentTopic) { admin.topics().unload(topicName); } else { admin.nonPersistentTopics().unload(topicName); } } /** * Verifies reset-cursor at specific position using admin-api. * * <pre> * 1. Publish 50 messages * 2. Consume 20 messages * 3. reset cursor position on 10th message * 4. 
consume 40 messages from reset position * </pre> * * @param namespaceName * @throws Exception */ @Test(dataProvider = "namespaceNames", timeOut = 10000) public void testResetCursorOnPosition(String namespaceName) throws Exception { final String topicName = "persistent://prop-xyz/use/" + namespaceName + "/resetPosition"; final int totalProducedMessages = 50; // set retention admin.namespaces().setRetention("prop-xyz/use/ns1", new RetentionPolicies(10, 10)); // create consumer and subscription Consumer<byte[]> consumer = pulsarClient.newConsumer().topic(topicName).subscriptionName("my-sub") .subscriptionType(SubscriptionType.Shared).subscribe(); assertEquals(admin.topics().getSubscriptions(topicName), List.of("my-sub")); publishMessagesOnPersistentTopic(topicName, totalProducedMessages, 0); List<Message<byte[]>> messages = admin.topics().peekMessages(topicName, "my-sub", 10); assertEquals(messages.size(), 10); Message<byte[]> message = null; MessageIdImpl resetMessageId = null; int resetPositionId = 10; for (int i = 0; i < 20; i++) { message = consumer.receive(1, TimeUnit.SECONDS); consumer.acknowledge(message); if (i == resetPositionId) { resetMessageId = (MessageIdImpl) message.getMessageId(); } } // close consumer which will clean up internal-receive-queue consumer.close(); // messages should still be available due to retention MessageIdImpl messageId = new MessageIdImpl(resetMessageId.getLedgerId(), resetMessageId.getEntryId(), -1); // reset position at resetMessageId admin.topics().resetCursor(topicName, "my-sub", messageId); consumer = pulsarClient.newConsumer().topic(topicName).subscriptionName("my-sub") .subscriptionType(SubscriptionType.Shared).subscribe(); MessageIdImpl msgId2 = (MessageIdImpl) consumer.receive(1, TimeUnit.SECONDS).getMessageId(); assertEquals(resetMessageId, msgId2); int receivedAfterReset = 1; // start with 1 because we have already received 1 msg for (int i = 0; i < totalProducedMessages; i++) { message = consumer.receive(500, 
TimeUnit.MILLISECONDS); if (message == null) { break; } consumer.acknowledge(message); ++receivedAfterReset; } assertEquals(receivedAfterReset, totalProducedMessages - resetPositionId); // invalid topic name try { admin.topics().resetCursor(topicName + "invalid", "my-sub", messageId); fail("It should have failed due to invalid topic name"); } catch (PulsarAdminException.NotFoundException e) { // Ok } // invalid cursor name try { admin.topics().resetCursor(topicName, "invalid-sub", messageId); fail("It should have failed due to invalid subscription name"); } catch (PulsarAdminException.NotFoundException e) { // Ok } // invalid position try { messageId = new MessageIdImpl(0, 0, -1); admin.topics().resetCursor(topicName, "my-sub", messageId); } catch (PulsarAdminException.PreconditionFailedException e) { fail("It shouldn't fail for a invalid position"); } consumer.close(); } private void publishMessagesOnPersistentTopic(String topicName, int messages, int startIdx) throws Exception { Producer<byte[]> producer = pulsarClient.newProducer() .topic(topicName) .enableBatching(false) .messageRoutingMode(MessageRoutingMode.SinglePartition) .create(); for (int i = startIdx; i < (messages + startIdx); i++) { String message = "message-" + i; producer.send(message.getBytes()); } producer.close(); } /** * It verifies that pulsar with different load-manager generates different load-report and returned by admin-api. 
* * @throws Exception */ @Test public void testLoadReportApi() throws Exception { this.conf.setLoadManagerClassName(SimpleLoadManagerImpl.class.getName()); MockedPulsarService mockPulsarSetup1 = new MockedPulsarService(this.conf); mockPulsarSetup1.setup(); PulsarAdmin simpleLoadManagerAdmin = mockPulsarSetup1.getAdmin(); assertNotNull(simpleLoadManagerAdmin.brokerStats().getLoadReport()); this.conf.setLoadManagerClassName(ModularLoadManagerImpl.class.getName()); MockedPulsarService mockPulsarSetup2 = new MockedPulsarService(this.conf); mockPulsarSetup2.setup(); PulsarAdmin modularLoadManagerAdmin = mockPulsarSetup2.getAdmin(); assertNotNull(modularLoadManagerAdmin.brokerStats().getLoadReport()); mockPulsarSetup1.cleanup(); mockPulsarSetup2.cleanup(); } @Test public void testPeerCluster() throws Exception { admin.clusters().createCluster("us-west1", ClusterData.builder().serviceUrl("http://broker.messaging.west1.example.com:8080").build()); admin.clusters().createCluster("us-west2", ClusterData.builder().serviceUrl("http://broker.messaging.west2.example.com:8080").build()); admin.clusters().createCluster("us-east1", ClusterData.builder().serviceUrl("http://broker.messaging.east1.example.com:8080").build()); admin.clusters().createCluster("us-east2", ClusterData.builder().serviceUrl("http://broker.messaging.east2.example.com:8080").build()); admin.clusters().updatePeerClusterNames("us-west1", Sets.newLinkedHashSet(List.of("us-west2"))); assertEquals(admin.clusters().getCluster("us-west1").getPeerClusterNames(), List.of("us-west2")); assertNull(admin.clusters().getCluster("us-west2").getPeerClusterNames()); // update cluster with duplicate peer-clusters in the list admin.clusters().updatePeerClusterNames("us-west1", Sets.newLinkedHashSet( List.of("us-west2", "us-east1", "us-west2", "us-east1", "us-west2", "us-east1"))); assertEquals(admin.clusters().getCluster("us-west1").getPeerClusterNames(), List.of("us-west2", "us-east1")); 
admin.clusters().updatePeerClusterNames("us-west1", null); assertNull(admin.clusters().getCluster("us-west1").getPeerClusterNames()); // Check name validation try { admin.clusters().updatePeerClusterNames("us-west1", Sets.newLinkedHashSet(List.of("invalid-cluster"))); fail("should have failed"); } catch (PulsarAdminException e) { assertTrue(e instanceof PreconditionFailedException); } // Cluster itselft can't be part of peer-list try { admin.clusters().updatePeerClusterNames("us-west1", Sets.newLinkedHashSet(List.of("us-west1"))); fail("should have failed"); } catch (PulsarAdminException e) { assertTrue(e instanceof PreconditionFailedException); } } /** * It validates that peer-cluster can't coexist in replication-cluster list. * * @throws Exception */ @Test public void testReplicationPeerCluster() throws Exception { admin.clusters().createCluster("us-west1", ClusterData.builder().serviceUrl("http://broker.messaging.west1.example.com:8080").build()); admin.clusters().createCluster("us-west2", ClusterData.builder().serviceUrl("http://broker.messaging.west2.example.com:8080").build()); admin.clusters().createCluster("us-west3", ClusterData.builder().serviceUrl("http://broker.messaging.west2.example.com:8080").build()); admin.clusters().createCluster("us-west4", ClusterData.builder().serviceUrl("http://broker.messaging.west2.example.com:8080").build()); admin.clusters().createCluster("us-east1", ClusterData.builder().serviceUrl("http://broker.messaging.east1.example.com:8080").build()); admin.clusters().createCluster("us-east2", ClusterData.builder().serviceUrl("http://broker.messaging.east2.example.com:8080").build()); admin.clusters().createCluster("global", ClusterData.builder().build()); final String property = "peer-prop"; Set<String> allowedClusters = Set.of("us-west1", "us-west2", "us-west3", "us-west4", "us-east1", "us-east2"); TenantInfoImpl propConfig = new TenantInfoImpl(Set.of("test"), allowedClusters); admin.tenants().createTenant(property, propConfig); 
final String namespace = property + "/global/conflictPeer"; admin.namespaces().createNamespace(namespace); admin.clusters().updatePeerClusterNames("us-west1", Sets.newLinkedHashSet(List.of("us-west2", "us-west3"))); assertEquals(admin.clusters().getCluster("us-west1").getPeerClusterNames(), List.of("us-west2", "us-west3")); // (1) no conflicting peer Set<String> clusterIds = Set.of("us-east1", "us-east2"); admin.namespaces().setNamespaceReplicationClusters(namespace, clusterIds); // (2) conflicting peer clusterIds = Set.of("us-west2", "us-west3", "us-west1"); try { admin.namespaces().setNamespaceReplicationClusters(namespace, clusterIds); fail("Peer-cluster can't coexist in replication cluster list"); } catch (PulsarAdminException.ConflictException e) { // Ok } clusterIds = Set.of("us-west2", "us-west3"); // no peer coexist in replication clusters admin.namespaces().setNamespaceReplicationClusters(namespace, clusterIds); clusterIds = Set.of("us-west1", "us-west4"); // no peer coexist in replication clusters admin.namespaces().setNamespaceReplicationClusters(namespace, clusterIds); } @Test public void clusterFailureDomain() throws PulsarAdminException { final String cluster = pulsar.getConfiguration().getClusterName(); admin.clusters().createCluster(cluster, ClusterData.builder() .serviceUrl(pulsar.getSafeWebServiceAddress()) .serviceUrlTls(pulsar.getWebServiceAddressTls()) .build()); // create FailureDomain domain = FailureDomain.builder() .brokers(Set.of("b1", "b2", "b3")) .build(); admin.clusters().createFailureDomain(cluster, "domain-1", domain); admin.clusters().updateFailureDomain(cluster, "domain-1", domain); assertEquals(admin.clusters().getFailureDomain(cluster, "domain-1"), domain); Map<String, FailureDomain> domains = admin.clusters().getFailureDomains(cluster); assertEquals(domains.size(), 1); assertTrue(domains.containsKey("domain-1")); try { // try to create domain with already registered brokers admin.clusters().createFailureDomain(cluster, 
"domain-2", domain); fail("should have failed because of brokers are already registered"); } catch (PulsarAdminException.ConflictException e) { // Ok } admin.clusters().deleteFailureDomain(cluster, "domain-1"); assertTrue(admin.clusters().getFailureDomains(cluster).isEmpty()); admin.clusters().createFailureDomain(cluster, "domain-2", domain); domains = admin.clusters().getFailureDomains(cluster); assertEquals(domains.size(), 1); assertTrue(domains.containsKey("domain-2")); } @Test public void namespaceAntiAffinity() throws PulsarAdminException { final String namespace = "prop-xyz/use/ns1"; final String antiAffinityGroup = "group"; assertTrue(isBlank(admin.namespaces().getNamespaceAntiAffinityGroup(namespace))); admin.namespaces().setNamespaceAntiAffinityGroup(namespace, antiAffinityGroup); assertEquals(admin.namespaces().getNamespaceAntiAffinityGroup(namespace), antiAffinityGroup); admin.namespaces().deleteNamespaceAntiAffinityGroup(namespace); assertTrue(isBlank(admin.namespaces().getNamespaceAntiAffinityGroup(namespace))); final String ns1 = "prop-xyz/use/antiAG1"; final String ns2 = "prop-xyz/use/antiAG2"; final String ns3 = "prop-xyz/use/antiAG3"; admin.namespaces().createNamespace(ns1); admin.namespaces().createNamespace(ns2); admin.namespaces().createNamespace(ns3); admin.namespaces().setNamespaceAntiAffinityGroup(ns1, antiAffinityGroup); admin.namespaces().setNamespaceAntiAffinityGroup(ns2, antiAffinityGroup); admin.namespaces().setNamespaceAntiAffinityGroup(ns3, antiAffinityGroup); Set<String> namespaces = new HashSet<>( admin.namespaces().getAntiAffinityNamespaces("prop-xyz", "use", antiAffinityGroup)); assertEquals(namespaces.size(), 3); assertTrue(namespaces.contains(ns1)); assertTrue(namespaces.contains(ns2)); assertTrue(namespaces.contains(ns3)); List<String> namespaces2 = admin.namespaces().getAntiAffinityNamespaces("prop-xyz", "use", "invalid-group"); assertEquals(namespaces2.size(), 0); } @Test public void testNonPersistentTopics() throws Exception 
{ final String namespace = "prop-xyz/use/ns2"; final String topicName = "non-persistent://" + namespace + "/topic"; admin.namespaces().createNamespace(namespace, 20); int totalTopics = 100; Set<String> topicNames = new HashSet<>(); for (int i = 0; i < totalTopics; i++) { topicNames.add(topicName + i); Producer<byte[]> producer = pulsarClient.newProducer() .topic(topicName + i) .enableBatching(false) .messageRoutingMode(MessageRoutingMode.SinglePartition) .create(); producer.close(); } for (int i = 0; i < totalTopics; i++) { Topic topic = pulsar.getBrokerService().getTopicReference(topicName + i).get(); assertNotNull(topic); } Set<String> topicsInNs = Sets.newHashSet(admin.nonPersistentTopics().getList(namespace)); assertEquals(topicsInNs.size(), totalTopics); topicsInNs.removeAll(topicNames); assertEquals(topicsInNs.size(), 0); } @Test public void testPublishConsumerStats() throws Exception { final String topicName = "statTopic"; final String subscriberName = topicName + "-my-sub-1"; final String topic = "persistent://prop-xyz/use/ns1/" + topicName; final String producerName = "myProducer"; @Cleanup PulsarClient client = PulsarClient.builder().serviceUrl(pulsar.getWebServiceAddress()).build(); Consumer<byte[]> consumer = client.newConsumer().topic(topic).subscriptionName(subscriberName) .subscriptionType(SubscriptionType.Shared).subscribe(); Producer<byte[]> producer = client.newProducer() .topic(topic) .producerName(producerName) .enableBatching(false) .messageRoutingMode(MessageRoutingMode.SinglePartition) .create(); retryStrategically((test) -> { TopicStats stats; try { stats = admin.topics().getStats(topic); return stats.getPublishers().size() > 0 && stats.getSubscriptions().get(subscriberName) != null && stats.getSubscriptions().get(subscriberName).getConsumers().size() > 0; } catch (PulsarAdminException e) { return false; } }, 5, 200); TopicStats topicStats = admin.topics().getStats(topic); assertEquals(topicStats.getPublishers().size(), 1); 
assertNotNull(topicStats.getPublishers().get(0).getAddress()); assertNotNull(topicStats.getPublishers().get(0).getClientVersion()); assertNotNull(topicStats.getPublishers().get(0).getConnectedSince()); assertNotNull(topicStats.getPublishers().get(0).getProducerName()); assertEquals(topicStats.getPublishers().get(0).getProducerName(), producerName); SubscriptionStats subscriber = topicStats.getSubscriptions().get(subscriberName); assertNotNull(subscriber); assertEquals(subscriber.getConsumers().size(), 1); ConsumerStats consumerStats = subscriber.getConsumers().get(0); assertNotNull(consumerStats.getAddress()); assertNotNull(consumerStats.getClientVersion()); assertNotNull(consumerStats.getConnectedSince()); producer.close(); consumer.close(); } @Test public void testTenantNameWithUnderscore() throws Exception { TenantInfoImpl tenantInfo = new TenantInfoImpl(Set.of("role1", "role2"), Set.of("use")); admin.tenants().createTenant("prop_xyz", tenantInfo); admin.namespaces().createNamespace("prop_xyz/use/my-namespace"); String topic = "persistent://prop_xyz/use/my-namespace/my-topic"; Producer<byte[]> producer = pulsarClient.newProducer() .topic(topic) .enableBatching(false) .messageRoutingMode(MessageRoutingMode.SinglePartition) .create(); TopicStats stats = admin.topics().getStats(topic); assertEquals(stats.getPublishers().size(), 1); producer.close(); } @Test public void testTenantNameWithInvalidCharacters() throws Exception { TenantInfoImpl tenantInfo = new TenantInfoImpl(Set.of("role1", "role2"), Set.of("use")); // If we try to create property with invalid characters, it should fail immediately try { admin.tenants().createTenant("prop xyz", tenantInfo); fail("Should have failed"); } catch (PulsarAdminException e) { // Expected } try { admin.tenants().createTenant("prop&xyz", tenantInfo); fail("Should have failed"); } catch (PulsarAdminException e) { // Expected } } }
googleapis/google-cloud-java
37,153
java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/ListTuningJobsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1/genai_tuning_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1; /** * * * <pre> * Response message for * [GenAiTuningService.ListTuningJobs][google.cloud.aiplatform.v1.GenAiTuningService.ListTuningJobs] * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1.ListTuningJobsResponse} */ public final class ListTuningJobsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.ListTuningJobsResponse) ListTuningJobsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListTuningJobsResponse.newBuilder() to construct. 
private ListTuningJobsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListTuningJobsResponse() { tuningJobs_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListTuningJobsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.GenAiTuningServiceProto .internal_static_google_cloud_aiplatform_v1_ListTuningJobsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.GenAiTuningServiceProto .internal_static_google_cloud_aiplatform_v1_ListTuningJobsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.ListTuningJobsResponse.class, com.google.cloud.aiplatform.v1.ListTuningJobsResponse.Builder.class); } public static final int TUNING_JOBS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.aiplatform.v1.TuningJob> tuningJobs_; /** * * * <pre> * List of TuningJobs in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TuningJob tuning_jobs = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.aiplatform.v1.TuningJob> getTuningJobsList() { return tuningJobs_; } /** * * * <pre> * List of TuningJobs in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TuningJob tuning_jobs = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.aiplatform.v1.TuningJobOrBuilder> getTuningJobsOrBuilderList() { return tuningJobs_; } /** * * * <pre> * List of TuningJobs in the requested page. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1.TuningJob tuning_jobs = 1;</code> */ @java.lang.Override public int getTuningJobsCount() { return tuningJobs_.size(); } /** * * * <pre> * List of TuningJobs in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TuningJob tuning_jobs = 1;</code> */ @java.lang.Override public com.google.cloud.aiplatform.v1.TuningJob getTuningJobs(int index) { return tuningJobs_.get(index); } /** * * * <pre> * List of TuningJobs in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TuningJob tuning_jobs = 1;</code> */ @java.lang.Override public com.google.cloud.aiplatform.v1.TuningJobOrBuilder getTuningJobsOrBuilder(int index) { return tuningJobs_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to retrieve the next page of results. * Pass to * [ListTuningJobsRequest.page_token][google.cloud.aiplatform.v1.ListTuningJobsRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token to retrieve the next page of results. * Pass to * [ListTuningJobsRequest.page_token][google.cloud.aiplatform.v1.ListTuningJobsRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < tuningJobs_.size(); i++) { output.writeMessage(1, tuningJobs_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < tuningJobs_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, tuningJobs_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1.ListTuningJobsResponse)) { return super.equals(obj); } com.google.cloud.aiplatform.v1.ListTuningJobsResponse other = (com.google.cloud.aiplatform.v1.ListTuningJobsResponse) obj; if (!getTuningJobsList().equals(other.getTuningJobsList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return 
false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getTuningJobsCount() > 0) { hash = (37 * hash) + TUNING_JOBS_FIELD_NUMBER; hash = (53 * hash) + getTuningJobsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1.ListTuningJobsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.ListTuningJobsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.ListTuningJobsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.ListTuningJobsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.ListTuningJobsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.ListTuningJobsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static com.google.cloud.aiplatform.v1.ListTuningJobsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.ListTuningJobsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.ListTuningJobsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.ListTuningJobsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.ListTuningJobsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.ListTuningJobsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.aiplatform.v1.ListTuningJobsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public 
Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for * [GenAiTuningService.ListTuningJobs][google.cloud.aiplatform.v1.GenAiTuningService.ListTuningJobs] * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1.ListTuningJobsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.ListTuningJobsResponse) com.google.cloud.aiplatform.v1.ListTuningJobsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.GenAiTuningServiceProto .internal_static_google_cloud_aiplatform_v1_ListTuningJobsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.GenAiTuningServiceProto .internal_static_google_cloud_aiplatform_v1_ListTuningJobsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.ListTuningJobsResponse.class, com.google.cloud.aiplatform.v1.ListTuningJobsResponse.Builder.class); } // Construct using com.google.cloud.aiplatform.v1.ListTuningJobsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (tuningJobsBuilder_ == null) { tuningJobs_ = java.util.Collections.emptyList(); } else { tuningJobs_ = null; tuningJobsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1.GenAiTuningServiceProto .internal_static_google_cloud_aiplatform_v1_ListTuningJobsResponse_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1.ListTuningJobsResponse getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1.ListTuningJobsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1.ListTuningJobsResponse build() { com.google.cloud.aiplatform.v1.ListTuningJobsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1.ListTuningJobsResponse buildPartial() { com.google.cloud.aiplatform.v1.ListTuningJobsResponse result = new com.google.cloud.aiplatform.v1.ListTuningJobsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.aiplatform.v1.ListTuningJobsResponse result) { if (tuningJobsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { tuningJobs_ = java.util.Collections.unmodifiableList(tuningJobs_); bitField0_ = (bitField0_ & ~0x00000001); } result.tuningJobs_ = tuningJobs_; } else { result.tuningJobs_ = tuningJobsBuilder_.build(); } } private void buildPartial0(com.google.cloud.aiplatform.v1.ListTuningJobsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } 
@java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1.ListTuningJobsResponse) { return mergeFrom((com.google.cloud.aiplatform.v1.ListTuningJobsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.aiplatform.v1.ListTuningJobsResponse other) { if (other == com.google.cloud.aiplatform.v1.ListTuningJobsResponse.getDefaultInstance()) return this; if (tuningJobsBuilder_ == null) { if (!other.tuningJobs_.isEmpty()) { if (tuningJobs_.isEmpty()) { tuningJobs_ = other.tuningJobs_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureTuningJobsIsMutable(); tuningJobs_.addAll(other.tuningJobs_); } onChanged(); } } else { if (!other.tuningJobs_.isEmpty()) { if (tuningJobsBuilder_.isEmpty()) { tuningJobsBuilder_.dispose(); tuningJobsBuilder_ = null; tuningJobs_ = other.tuningJobs_; bitField0_ = (bitField0_ & ~0x00000001); tuningJobsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getTuningJobsFieldBuilder() : null; } else { tuningJobsBuilder_.addAllMessages(other.tuningJobs_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.aiplatform.v1.TuningJob m = input.readMessage( com.google.cloud.aiplatform.v1.TuningJob.parser(), extensionRegistry); if (tuningJobsBuilder_ == null) { ensureTuningJobsIsMutable(); tuningJobs_.add(m); } else { tuningJobsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.aiplatform.v1.TuningJob> tuningJobs_ = java.util.Collections.emptyList(); private void ensureTuningJobsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { tuningJobs_ = new java.util.ArrayList<com.google.cloud.aiplatform.v1.TuningJob>(tuningJobs_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1.TuningJob, com.google.cloud.aiplatform.v1.TuningJob.Builder, 
com.google.cloud.aiplatform.v1.TuningJobOrBuilder> tuningJobsBuilder_; /** * * * <pre> * List of TuningJobs in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TuningJob tuning_jobs = 1;</code> */ public java.util.List<com.google.cloud.aiplatform.v1.TuningJob> getTuningJobsList() { if (tuningJobsBuilder_ == null) { return java.util.Collections.unmodifiableList(tuningJobs_); } else { return tuningJobsBuilder_.getMessageList(); } } /** * * * <pre> * List of TuningJobs in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TuningJob tuning_jobs = 1;</code> */ public int getTuningJobsCount() { if (tuningJobsBuilder_ == null) { return tuningJobs_.size(); } else { return tuningJobsBuilder_.getCount(); } } /** * * * <pre> * List of TuningJobs in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TuningJob tuning_jobs = 1;</code> */ public com.google.cloud.aiplatform.v1.TuningJob getTuningJobs(int index) { if (tuningJobsBuilder_ == null) { return tuningJobs_.get(index); } else { return tuningJobsBuilder_.getMessage(index); } } /** * * * <pre> * List of TuningJobs in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TuningJob tuning_jobs = 1;</code> */ public Builder setTuningJobs(int index, com.google.cloud.aiplatform.v1.TuningJob value) { if (tuningJobsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTuningJobsIsMutable(); tuningJobs_.set(index, value); onChanged(); } else { tuningJobsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * List of TuningJobs in the requested page. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1.TuningJob tuning_jobs = 1;</code> */ public Builder setTuningJobs( int index, com.google.cloud.aiplatform.v1.TuningJob.Builder builderForValue) { if (tuningJobsBuilder_ == null) { ensureTuningJobsIsMutable(); tuningJobs_.set(index, builderForValue.build()); onChanged(); } else { tuningJobsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * List of TuningJobs in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TuningJob tuning_jobs = 1;</code> */ public Builder addTuningJobs(com.google.cloud.aiplatform.v1.TuningJob value) { if (tuningJobsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTuningJobsIsMutable(); tuningJobs_.add(value); onChanged(); } else { tuningJobsBuilder_.addMessage(value); } return this; } /** * * * <pre> * List of TuningJobs in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TuningJob tuning_jobs = 1;</code> */ public Builder addTuningJobs(int index, com.google.cloud.aiplatform.v1.TuningJob value) { if (tuningJobsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTuningJobsIsMutable(); tuningJobs_.add(index, value); onChanged(); } else { tuningJobsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * List of TuningJobs in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TuningJob tuning_jobs = 1;</code> */ public Builder addTuningJobs(com.google.cloud.aiplatform.v1.TuningJob.Builder builderForValue) { if (tuningJobsBuilder_ == null) { ensureTuningJobsIsMutable(); tuningJobs_.add(builderForValue.build()); onChanged(); } else { tuningJobsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * List of TuningJobs in the requested page. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1.TuningJob tuning_jobs = 1;</code> */ public Builder addTuningJobs( int index, com.google.cloud.aiplatform.v1.TuningJob.Builder builderForValue) { if (tuningJobsBuilder_ == null) { ensureTuningJobsIsMutable(); tuningJobs_.add(index, builderForValue.build()); onChanged(); } else { tuningJobsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * List of TuningJobs in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TuningJob tuning_jobs = 1;</code> */ public Builder addAllTuningJobs( java.lang.Iterable<? extends com.google.cloud.aiplatform.v1.TuningJob> values) { if (tuningJobsBuilder_ == null) { ensureTuningJobsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, tuningJobs_); onChanged(); } else { tuningJobsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * List of TuningJobs in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TuningJob tuning_jobs = 1;</code> */ public Builder clearTuningJobs() { if (tuningJobsBuilder_ == null) { tuningJobs_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { tuningJobsBuilder_.clear(); } return this; } /** * * * <pre> * List of TuningJobs in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TuningJob tuning_jobs = 1;</code> */ public Builder removeTuningJobs(int index) { if (tuningJobsBuilder_ == null) { ensureTuningJobsIsMutable(); tuningJobs_.remove(index); onChanged(); } else { tuningJobsBuilder_.remove(index); } return this; } /** * * * <pre> * List of TuningJobs in the requested page. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1.TuningJob tuning_jobs = 1;</code> */ public com.google.cloud.aiplatform.v1.TuningJob.Builder getTuningJobsBuilder(int index) { return getTuningJobsFieldBuilder().getBuilder(index); } /** * * * <pre> * List of TuningJobs in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TuningJob tuning_jobs = 1;</code> */ public com.google.cloud.aiplatform.v1.TuningJobOrBuilder getTuningJobsOrBuilder(int index) { if (tuningJobsBuilder_ == null) { return tuningJobs_.get(index); } else { return tuningJobsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * List of TuningJobs in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TuningJob tuning_jobs = 1;</code> */ public java.util.List<? extends com.google.cloud.aiplatform.v1.TuningJobOrBuilder> getTuningJobsOrBuilderList() { if (tuningJobsBuilder_ != null) { return tuningJobsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(tuningJobs_); } } /** * * * <pre> * List of TuningJobs in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TuningJob tuning_jobs = 1;</code> */ public com.google.cloud.aiplatform.v1.TuningJob.Builder addTuningJobsBuilder() { return getTuningJobsFieldBuilder() .addBuilder(com.google.cloud.aiplatform.v1.TuningJob.getDefaultInstance()); } /** * * * <pre> * List of TuningJobs in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TuningJob tuning_jobs = 1;</code> */ public com.google.cloud.aiplatform.v1.TuningJob.Builder addTuningJobsBuilder(int index) { return getTuningJobsFieldBuilder() .addBuilder(index, com.google.cloud.aiplatform.v1.TuningJob.getDefaultInstance()); } /** * * * <pre> * List of TuningJobs in the requested page. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1.TuningJob tuning_jobs = 1;</code> */ public java.util.List<com.google.cloud.aiplatform.v1.TuningJob.Builder> getTuningJobsBuilderList() { return getTuningJobsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1.TuningJob, com.google.cloud.aiplatform.v1.TuningJob.Builder, com.google.cloud.aiplatform.v1.TuningJobOrBuilder> getTuningJobsFieldBuilder() { if (tuningJobsBuilder_ == null) { tuningJobsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1.TuningJob, com.google.cloud.aiplatform.v1.TuningJob.Builder, com.google.cloud.aiplatform.v1.TuningJobOrBuilder>( tuningJobs_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); tuningJobs_ = null; } return tuningJobsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to retrieve the next page of results. * Pass to * [ListTuningJobsRequest.page_token][google.cloud.aiplatform.v1.ListTuningJobsRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token to retrieve the next page of results. * Pass to * [ListTuningJobsRequest.page_token][google.cloud.aiplatform.v1.ListTuningJobsRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token to retrieve the next page of results. * Pass to * [ListTuningJobsRequest.page_token][google.cloud.aiplatform.v1.ListTuningJobsRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token to retrieve the next page of results. * Pass to * [ListTuningJobsRequest.page_token][google.cloud.aiplatform.v1.ListTuningJobsRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token to retrieve the next page of results. * Pass to * [ListTuningJobsRequest.page_token][google.cloud.aiplatform.v1.ListTuningJobsRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.ListTuningJobsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.ListTuningJobsResponse) private static final com.google.cloud.aiplatform.v1.ListTuningJobsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.ListTuningJobsResponse(); } public static com.google.cloud.aiplatform.v1.ListTuningJobsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListTuningJobsResponse> PARSER = new com.google.protobuf.AbstractParser<ListTuningJobsResponse>() { @java.lang.Override public ListTuningJobsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public 
static com.google.protobuf.Parser<ListTuningJobsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListTuningJobsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1.ListTuningJobsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,315
java-shopping-merchant-accounts/grpc-google-shopping-merchant-accounts-v1/src/main/java/com/google/shopping/merchant/accounts/v1/UserServiceGrpc.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.shopping.merchant.accounts.v1; import static io.grpc.MethodDescriptor.generateFullMethodName; /** * * * <pre> * Service to support user API. * </pre> */ @javax.annotation.Generated( value = "by gRPC proto compiler", comments = "Source: google/shopping/merchant/accounts/v1/user.proto") @io.grpc.stub.annotations.GrpcGenerated public final class UserServiceGrpc { private UserServiceGrpc() {} public static final java.lang.String SERVICE_NAME = "google.shopping.merchant.accounts.v1.UserService"; // Static method descriptors that strictly reflect the proto. 
private static volatile io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.GetUserRequest, com.google.shopping.merchant.accounts.v1.User> getGetUserMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "GetUser", requestType = com.google.shopping.merchant.accounts.v1.GetUserRequest.class, responseType = com.google.shopping.merchant.accounts.v1.User.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.GetUserRequest, com.google.shopping.merchant.accounts.v1.User> getGetUserMethod() { io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.GetUserRequest, com.google.shopping.merchant.accounts.v1.User> getGetUserMethod; if ((getGetUserMethod = UserServiceGrpc.getGetUserMethod) == null) { synchronized (UserServiceGrpc.class) { if ((getGetUserMethod = UserServiceGrpc.getGetUserMethod) == null) { UserServiceGrpc.getGetUserMethod = getGetUserMethod = io.grpc.MethodDescriptor .<com.google.shopping.merchant.accounts.v1.GetUserRequest, com.google.shopping.merchant.accounts.v1.User> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetUser")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.accounts.v1.GetUserRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.accounts.v1.User.getDefaultInstance())) .setSchemaDescriptor(new UserServiceMethodDescriptorSupplier("GetUser")) .build(); } } } return getGetUserMethod; } private static volatile io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.CreateUserRequest, com.google.shopping.merchant.accounts.v1.User> getCreateUserMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "CreateUser", requestType = 
com.google.shopping.merchant.accounts.v1.CreateUserRequest.class, responseType = com.google.shopping.merchant.accounts.v1.User.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.CreateUserRequest, com.google.shopping.merchant.accounts.v1.User> getCreateUserMethod() { io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.CreateUserRequest, com.google.shopping.merchant.accounts.v1.User> getCreateUserMethod; if ((getCreateUserMethod = UserServiceGrpc.getCreateUserMethod) == null) { synchronized (UserServiceGrpc.class) { if ((getCreateUserMethod = UserServiceGrpc.getCreateUserMethod) == null) { UserServiceGrpc.getCreateUserMethod = getCreateUserMethod = io.grpc.MethodDescriptor .<com.google.shopping.merchant.accounts.v1.CreateUserRequest, com.google.shopping.merchant.accounts.v1.User> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "CreateUser")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.accounts.v1.CreateUserRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.accounts.v1.User.getDefaultInstance())) .setSchemaDescriptor(new UserServiceMethodDescriptorSupplier("CreateUser")) .build(); } } } return getCreateUserMethod; } private static volatile io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.DeleteUserRequest, com.google.protobuf.Empty> getDeleteUserMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "DeleteUser", requestType = com.google.shopping.merchant.accounts.v1.DeleteUserRequest.class, responseType = com.google.protobuf.Empty.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.DeleteUserRequest, 
com.google.protobuf.Empty> getDeleteUserMethod() { io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.DeleteUserRequest, com.google.protobuf.Empty> getDeleteUserMethod; if ((getDeleteUserMethod = UserServiceGrpc.getDeleteUserMethod) == null) { synchronized (UserServiceGrpc.class) { if ((getDeleteUserMethod = UserServiceGrpc.getDeleteUserMethod) == null) { UserServiceGrpc.getDeleteUserMethod = getDeleteUserMethod = io.grpc.MethodDescriptor .<com.google.shopping.merchant.accounts.v1.DeleteUserRequest, com.google.protobuf.Empty> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "DeleteUser")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.accounts.v1.DeleteUserRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.protobuf.Empty.getDefaultInstance())) .setSchemaDescriptor(new UserServiceMethodDescriptorSupplier("DeleteUser")) .build(); } } } return getDeleteUserMethod; } private static volatile io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.UpdateUserRequest, com.google.shopping.merchant.accounts.v1.User> getUpdateUserMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "UpdateUser", requestType = com.google.shopping.merchant.accounts.v1.UpdateUserRequest.class, responseType = com.google.shopping.merchant.accounts.v1.User.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.UpdateUserRequest, com.google.shopping.merchant.accounts.v1.User> getUpdateUserMethod() { io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.UpdateUserRequest, com.google.shopping.merchant.accounts.v1.User> getUpdateUserMethod; if ((getUpdateUserMethod = UserServiceGrpc.getUpdateUserMethod) == null) { synchronized 
(UserServiceGrpc.class) { if ((getUpdateUserMethod = UserServiceGrpc.getUpdateUserMethod) == null) { UserServiceGrpc.getUpdateUserMethod = getUpdateUserMethod = io.grpc.MethodDescriptor .<com.google.shopping.merchant.accounts.v1.UpdateUserRequest, com.google.shopping.merchant.accounts.v1.User> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "UpdateUser")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.accounts.v1.UpdateUserRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.accounts.v1.User.getDefaultInstance())) .setSchemaDescriptor(new UserServiceMethodDescriptorSupplier("UpdateUser")) .build(); } } } return getUpdateUserMethod; } private static volatile io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.ListUsersRequest, com.google.shopping.merchant.accounts.v1.ListUsersResponse> getListUsersMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "ListUsers", requestType = com.google.shopping.merchant.accounts.v1.ListUsersRequest.class, responseType = com.google.shopping.merchant.accounts.v1.ListUsersResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.ListUsersRequest, com.google.shopping.merchant.accounts.v1.ListUsersResponse> getListUsersMethod() { io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.ListUsersRequest, com.google.shopping.merchant.accounts.v1.ListUsersResponse> getListUsersMethod; if ((getListUsersMethod = UserServiceGrpc.getListUsersMethod) == null) { synchronized (UserServiceGrpc.class) { if ((getListUsersMethod = UserServiceGrpc.getListUsersMethod) == null) { UserServiceGrpc.getListUsersMethod = getListUsersMethod = io.grpc.MethodDescriptor 
.<com.google.shopping.merchant.accounts.v1.ListUsersRequest, com.google.shopping.merchant.accounts.v1.ListUsersResponse> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListUsers")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.accounts.v1.ListUsersRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.accounts.v1.ListUsersResponse .getDefaultInstance())) .setSchemaDescriptor(new UserServiceMethodDescriptorSupplier("ListUsers")) .build(); } } } return getListUsersMethod; } /** Creates a new async stub that supports all call types for the service */ public static UserServiceStub newStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<UserServiceStub> factory = new io.grpc.stub.AbstractStub.StubFactory<UserServiceStub>() { @java.lang.Override public UserServiceStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new UserServiceStub(channel, callOptions); } }; return UserServiceStub.newStub(factory, channel); } /** Creates a new blocking-style stub that supports all types of calls on the service */ public static UserServiceBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<UserServiceBlockingV2Stub> factory = new io.grpc.stub.AbstractStub.StubFactory<UserServiceBlockingV2Stub>() { @java.lang.Override public UserServiceBlockingV2Stub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new UserServiceBlockingV2Stub(channel, callOptions); } }; return UserServiceBlockingV2Stub.newStub(factory, channel); } /** * Creates a new blocking-style stub that supports unary and streaming output calls on the service */ public static UserServiceBlockingStub newBlockingStub(io.grpc.Channel channel) { 
io.grpc.stub.AbstractStub.StubFactory<UserServiceBlockingStub> factory = new io.grpc.stub.AbstractStub.StubFactory<UserServiceBlockingStub>() { @java.lang.Override public UserServiceBlockingStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new UserServiceBlockingStub(channel, callOptions); } }; return UserServiceBlockingStub.newStub(factory, channel); } /** Creates a new ListenableFuture-style stub that supports unary calls on the service */ public static UserServiceFutureStub newFutureStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<UserServiceFutureStub> factory = new io.grpc.stub.AbstractStub.StubFactory<UserServiceFutureStub>() { @java.lang.Override public UserServiceFutureStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new UserServiceFutureStub(channel, callOptions); } }; return UserServiceFutureStub.newStub(factory, channel); } /** * * * <pre> * Service to support user API. * </pre> */ public interface AsyncService { /** * * * <pre> * Retrieves a Merchant Center account user. * </pre> */ default void getUser( com.google.shopping.merchant.accounts.v1.GetUserRequest request, io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.User> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGetUserMethod(), responseObserver); } /** * * * <pre> * Creates a Merchant Center account user. Executing this method requires * admin access. * </pre> */ default void createUser( com.google.shopping.merchant.accounts.v1.CreateUserRequest request, io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.User> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getCreateUserMethod(), responseObserver); } /** * * * <pre> * Deletes a Merchant Center account user. Executing this method requires * admin access. The user to be deleted can't be the last admin user of that * account. 
Also a user is protected from deletion if it * is managed by Business Manager" * </pre> */ default void deleteUser( com.google.shopping.merchant.accounts.v1.DeleteUserRequest request, io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getDeleteUserMethod(), responseObserver); } /** * * * <pre> * Updates a Merchant Center account user. Executing this method requires * admin access. * </pre> */ default void updateUser( com.google.shopping.merchant.accounts.v1.UpdateUserRequest request, io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.User> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getUpdateUserMethod(), responseObserver); } /** * * * <pre> * Lists all users of a Merchant Center account. * </pre> */ default void listUsers( com.google.shopping.merchant.accounts.v1.ListUsersRequest request, io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.ListUsersResponse> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getListUsersMethod(), responseObserver); } } /** * Base class for the server implementation of the service UserService. * * <pre> * Service to support user API. * </pre> */ public abstract static class UserServiceImplBase implements io.grpc.BindableService, AsyncService { @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() { return UserServiceGrpc.bindService(this); } } /** * A stub to allow clients to do asynchronous rpc calls to service UserService. * * <pre> * Service to support user API. 
* </pre> */ public static final class UserServiceStub extends io.grpc.stub.AbstractAsyncStub<UserServiceStub> { private UserServiceStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected UserServiceStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new UserServiceStub(channel, callOptions); } /** * * * <pre> * Retrieves a Merchant Center account user. * </pre> */ public void getUser( com.google.shopping.merchant.accounts.v1.GetUserRequest request, io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.User> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getGetUserMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Creates a Merchant Center account user. Executing this method requires * admin access. * </pre> */ public void createUser( com.google.shopping.merchant.accounts.v1.CreateUserRequest request, io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.User> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getCreateUserMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Deletes a Merchant Center account user. Executing this method requires * admin access. The user to be deleted can't be the last admin user of that * account. Also a user is protected from deletion if it * is managed by Business Manager" * </pre> */ public void deleteUser( com.google.shopping.merchant.accounts.v1.DeleteUserRequest request, io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getDeleteUserMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Updates a Merchant Center account user. Executing this method requires * admin access. 
* </pre> */ public void updateUser( com.google.shopping.merchant.accounts.v1.UpdateUserRequest request, io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.User> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getUpdateUserMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Lists all users of a Merchant Center account. * </pre> */ public void listUsers( com.google.shopping.merchant.accounts.v1.ListUsersRequest request, io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.ListUsersResponse> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getListUsersMethod(), getCallOptions()), request, responseObserver); } } /** * A stub to allow clients to do synchronous rpc calls to service UserService. * * <pre> * Service to support user API. * </pre> */ public static final class UserServiceBlockingV2Stub extends io.grpc.stub.AbstractBlockingStub<UserServiceBlockingV2Stub> { private UserServiceBlockingV2Stub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected UserServiceBlockingV2Stub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new UserServiceBlockingV2Stub(channel, callOptions); } /** * * * <pre> * Retrieves a Merchant Center account user. * </pre> */ public com.google.shopping.merchant.accounts.v1.User getUser( com.google.shopping.merchant.accounts.v1.GetUserRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getGetUserMethod(), getCallOptions(), request); } /** * * * <pre> * Creates a Merchant Center account user. Executing this method requires * admin access. 
* </pre> */ public com.google.shopping.merchant.accounts.v1.User createUser( com.google.shopping.merchant.accounts.v1.CreateUserRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getCreateUserMethod(), getCallOptions(), request); } /** * * * <pre> * Deletes a Merchant Center account user. Executing this method requires * admin access. The user to be deleted can't be the last admin user of that * account. Also a user is protected from deletion if it * is managed by Business Manager" * </pre> */ public com.google.protobuf.Empty deleteUser( com.google.shopping.merchant.accounts.v1.DeleteUserRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getDeleteUserMethod(), getCallOptions(), request); } /** * * * <pre> * Updates a Merchant Center account user. Executing this method requires * admin access. * </pre> */ public com.google.shopping.merchant.accounts.v1.User updateUser( com.google.shopping.merchant.accounts.v1.UpdateUserRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getUpdateUserMethod(), getCallOptions(), request); } /** * * * <pre> * Lists all users of a Merchant Center account. * </pre> */ public com.google.shopping.merchant.accounts.v1.ListUsersResponse listUsers( com.google.shopping.merchant.accounts.v1.ListUsersRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getListUsersMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do limited synchronous rpc calls to service UserService. * * <pre> * Service to support user API. 
* </pre> */ public static final class UserServiceBlockingStub extends io.grpc.stub.AbstractBlockingStub<UserServiceBlockingStub> { private UserServiceBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected UserServiceBlockingStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new UserServiceBlockingStub(channel, callOptions); } /** * * * <pre> * Retrieves a Merchant Center account user. * </pre> */ public com.google.shopping.merchant.accounts.v1.User getUser( com.google.shopping.merchant.accounts.v1.GetUserRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getGetUserMethod(), getCallOptions(), request); } /** * * * <pre> * Creates a Merchant Center account user. Executing this method requires * admin access. * </pre> */ public com.google.shopping.merchant.accounts.v1.User createUser( com.google.shopping.merchant.accounts.v1.CreateUserRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getCreateUserMethod(), getCallOptions(), request); } /** * * * <pre> * Deletes a Merchant Center account user. Executing this method requires * admin access. The user to be deleted can't be the last admin user of that * account. Also a user is protected from deletion if it * is managed by Business Manager" * </pre> */ public com.google.protobuf.Empty deleteUser( com.google.shopping.merchant.accounts.v1.DeleteUserRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getDeleteUserMethod(), getCallOptions(), request); } /** * * * <pre> * Updates a Merchant Center account user. Executing this method requires * admin access. 
* </pre> */ public com.google.shopping.merchant.accounts.v1.User updateUser( com.google.shopping.merchant.accounts.v1.UpdateUserRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getUpdateUserMethod(), getCallOptions(), request); } /** * * * <pre> * Lists all users of a Merchant Center account. * </pre> */ public com.google.shopping.merchant.accounts.v1.ListUsersResponse listUsers( com.google.shopping.merchant.accounts.v1.ListUsersRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getListUsersMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do ListenableFuture-style rpc calls to service UserService. * * <pre> * Service to support user API. * </pre> */ public static final class UserServiceFutureStub extends io.grpc.stub.AbstractFutureStub<UserServiceFutureStub> { private UserServiceFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected UserServiceFutureStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new UserServiceFutureStub(channel, callOptions); } /** * * * <pre> * Retrieves a Merchant Center account user. * </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.shopping.merchant.accounts.v1.User> getUser(com.google.shopping.merchant.accounts.v1.GetUserRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getGetUserMethod(), getCallOptions()), request); } /** * * * <pre> * Creates a Merchant Center account user. Executing this method requires * admin access. 
* </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.shopping.merchant.accounts.v1.User> createUser(com.google.shopping.merchant.accounts.v1.CreateUserRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getCreateUserMethod(), getCallOptions()), request); } /** * * * <pre> * Deletes a Merchant Center account user. Executing this method requires * admin access. The user to be deleted can't be the last admin user of that * account. Also a user is protected from deletion if it * is managed by Business Manager" * </pre> */ public com.google.common.util.concurrent.ListenableFuture<com.google.protobuf.Empty> deleteUser( com.google.shopping.merchant.accounts.v1.DeleteUserRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getDeleteUserMethod(), getCallOptions()), request); } /** * * * <pre> * Updates a Merchant Center account user. Executing this method requires * admin access. * </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.shopping.merchant.accounts.v1.User> updateUser(com.google.shopping.merchant.accounts.v1.UpdateUserRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getUpdateUserMethod(), getCallOptions()), request); } /** * * * <pre> * Lists all users of a Merchant Center account. 
* </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.shopping.merchant.accounts.v1.ListUsersResponse> listUsers(com.google.shopping.merchant.accounts.v1.ListUsersRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getListUsersMethod(), getCallOptions()), request); } } private static final int METHODID_GET_USER = 0; private static final int METHODID_CREATE_USER = 1; private static final int METHODID_DELETE_USER = 2; private static final int METHODID_UPDATE_USER = 3; private static final int METHODID_LIST_USERS = 4; private static final class MethodHandlers<Req, Resp> implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>, io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> { private final AsyncService serviceImpl; private final int methodId; MethodHandlers(AsyncService serviceImpl, int methodId) { this.serviceImpl = serviceImpl; this.methodId = methodId; } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { case METHODID_GET_USER: serviceImpl.getUser( (com.google.shopping.merchant.accounts.v1.GetUserRequest) request, (io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.User>) responseObserver); break; case METHODID_CREATE_USER: serviceImpl.createUser( (com.google.shopping.merchant.accounts.v1.CreateUserRequest) request, (io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.User>) responseObserver); break; case METHODID_DELETE_USER: serviceImpl.deleteUser( (com.google.shopping.merchant.accounts.v1.DeleteUserRequest) request, (io.grpc.stub.StreamObserver<com.google.protobuf.Empty>) responseObserver); break; case METHODID_UPDATE_USER: serviceImpl.updateUser( (com.google.shopping.merchant.accounts.v1.UpdateUserRequest) request, 
(io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.User>) responseObserver); break; case METHODID_LIST_USERS: serviceImpl.listUsers( (com.google.shopping.merchant.accounts.v1.ListUsersRequest) request, (io.grpc.stub.StreamObserver< com.google.shopping.merchant.accounts.v1.ListUsersResponse>) responseObserver); break; default: throw new AssertionError(); } } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public io.grpc.stub.StreamObserver<Req> invoke( io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { default: throw new AssertionError(); } } } public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) { return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor()) .addMethod( getGetUserMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.shopping.merchant.accounts.v1.GetUserRequest, com.google.shopping.merchant.accounts.v1.User>(service, METHODID_GET_USER))) .addMethod( getCreateUserMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.shopping.merchant.accounts.v1.CreateUserRequest, com.google.shopping.merchant.accounts.v1.User>(service, METHODID_CREATE_USER))) .addMethod( getDeleteUserMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.shopping.merchant.accounts.v1.DeleteUserRequest, com.google.protobuf.Empty>(service, METHODID_DELETE_USER))) .addMethod( getUpdateUserMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.shopping.merchant.accounts.v1.UpdateUserRequest, com.google.shopping.merchant.accounts.v1.User>(service, METHODID_UPDATE_USER))) .addMethod( getListUsersMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.shopping.merchant.accounts.v1.ListUsersRequest, com.google.shopping.merchant.accounts.v1.ListUsersResponse>( service, METHODID_LIST_USERS))) .build(); } private abstract static class 
UserServiceBaseDescriptorSupplier implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier { UserServiceBaseDescriptorSupplier() {} @java.lang.Override public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() { return com.google.shopping.merchant.accounts.v1.UserProto.getDescriptor(); } @java.lang.Override public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() { return getFileDescriptor().findServiceByName("UserService"); } } private static final class UserServiceFileDescriptorSupplier extends UserServiceBaseDescriptorSupplier { UserServiceFileDescriptorSupplier() {} } private static final class UserServiceMethodDescriptorSupplier extends UserServiceBaseDescriptorSupplier implements io.grpc.protobuf.ProtoMethodDescriptorSupplier { private final java.lang.String methodName; UserServiceMethodDescriptorSupplier(java.lang.String methodName) { this.methodName = methodName; } @java.lang.Override public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() { return getServiceDescriptor().findMethodByName(methodName); } } private static volatile io.grpc.ServiceDescriptor serviceDescriptor; public static io.grpc.ServiceDescriptor getServiceDescriptor() { io.grpc.ServiceDescriptor result = serviceDescriptor; if (result == null) { synchronized (UserServiceGrpc.class) { result = serviceDescriptor; if (result == null) { serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME) .setSchemaDescriptor(new UserServiceFileDescriptorSupplier()) .addMethod(getGetUserMethod()) .addMethod(getCreateUserMethod()) .addMethod(getDeleteUserMethod()) .addMethod(getUpdateUserMethod()) .addMethod(getListUsersMethod()) .build(); } } } return result; } }
apache/hbase
37,059
hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestBulkLoadHFiles.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.tool; import static org.apache.hadoop.hbase.HBaseTestingUtil.countRows; import static org.apache.hadoop.hbase.util.LocatedBlockHelper.getLocatedBlockLocations; import static org.hamcrest.Matchers.greaterThan; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.IOException; import java.net.InetAddress; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.TreeMap; import java.util.concurrent.CompletableFuture; import java.util.concurrent.atomic.AtomicInteger; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionLocation; import 
org.apache.hadoop.hbase.NamespaceDescriptor; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.client.AsyncClusterConnection; import org.apache.hadoop.hbase.client.AsyncTableRegionLocator; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.client.TableDescriptorBuilder; import org.apache.hadoop.hbase.codec.KeyValueCodecWithTags; import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.io.hfile.CacheConfig; import org.apache.hadoop.hbase.io.hfile.HFile; import org.apache.hadoop.hbase.io.hfile.HFileScanner; import org.apache.hadoop.hbase.regionserver.BloomType; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.CommonFSUtils; import org.apache.hadoop.hbase.util.FutureUtils; import org.apache.hadoop.hbase.util.HFileTestUtil; import org.apache.hadoop.hdfs.DistributedFileSystem; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.LocatedBlock; import org.apache.hadoop.hdfs.protocol.LocatedBlocks; import org.hamcrest.MatcherAssert; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.apache.hbase.thirdparty.com.google.common.collect.Lists; /** * Test cases for the "load" half of the HFileOutputFormat bulk load functionality. 
These tests run * faster than the full MR cluster tests in TestHFileOutputFormat */ @Category({ MiscTests.class, LargeTests.class }) public class TestBulkLoadHFiles { @ClassRule public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestBulkLoadHFiles.class); @Rule public TestName tn = new TestName(); private static final byte[] QUALIFIER = Bytes.toBytes("myqual"); private static final byte[] FAMILY = Bytes.toBytes("myfam"); private static final String NAMESPACE = "bulkNS"; static final String EXPECTED_MSG_FOR_NON_EXISTING_FAMILY = "Unmatched family names found"; static final int MAX_FILES_PER_REGION_PER_FAMILY = 4; private static final byte[][] SPLIT_KEYS = new byte[][] { Bytes.toBytes("ddd"), Bytes.toBytes("ppp") }; static HBaseTestingUtil util = new HBaseTestingUtil(); @BeforeClass public static void setUpBeforeClass() throws Exception { util.getConfiguration().set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, ""); util.getConfiguration().setInt(BulkLoadHFiles.MAX_FILES_PER_REGION_PER_FAMILY, MAX_FILES_PER_REGION_PER_FAMILY); // change default behavior so that tag values are returned with normal rpcs util.getConfiguration().set(HConstants.RPC_CODEC_CONF_KEY, KeyValueCodecWithTags.class.getCanonicalName()); util.startMiniCluster(); setupNamespace(); } protected static void setupNamespace() throws Exception { util.getAdmin().createNamespace(NamespaceDescriptor.create(NAMESPACE).build()); } @AfterClass public static void tearDownAfterClass() throws Exception { util.shutdownMiniCluster(); } @Test public void testSimpleLoadWithMap() throws Exception { runTest("testSimpleLoadWithMap", BloomType.NONE, new byte[][][] { new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("cccc") }, new byte[][] { Bytes.toBytes("ddd"), Bytes.toBytes("ooo") }, }, true); } /** * Test case that creates some regions and loads HFiles that fit snugly inside those regions */ @Test public void testSimpleLoad() throws Exception { runTest("testSimpleLoad", BloomType.NONE, 
new byte[][][] { new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("cccc") }, new byte[][] { Bytes.toBytes("ddd"), Bytes.toBytes("ooo") }, }); } @Test public void testSimpleLoadWithFileCopy() throws Exception { String testName = tn.getMethodName(); final byte[] TABLE_NAME = Bytes.toBytes("mytable_" + testName); runTest(testName, buildHTD(TableName.valueOf(TABLE_NAME), BloomType.NONE), false, null, new byte[][][] { new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("cccc") }, new byte[][] { Bytes.toBytes("ddd"), Bytes.toBytes("ooo") }, }, false, true, 2); } /** * Test case that creates some regions and loads HFiles that cross the boundaries of those regions */ @Test public void testRegionCrossingLoad() throws Exception { runTest("testRegionCrossingLoad", BloomType.NONE, new byte[][][] { new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("eee") }, new byte[][] { Bytes.toBytes("fff"), Bytes.toBytes("zzz") }, }); } /** * Test loading into a column family that has a ROW bloom filter. */ @Test public void testRegionCrossingRowBloom() throws Exception { runTest("testRegionCrossingLoadRowBloom", BloomType.ROW, new byte[][][] { new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("eee") }, new byte[][] { Bytes.toBytes("fff"), Bytes.toBytes("zzz") }, }); } /** * Test loading into a column family that has a ROWCOL bloom filter. */ @Test public void testRegionCrossingRowColBloom() throws Exception { runTest("testRegionCrossingLoadRowColBloom", BloomType.ROWCOL, new byte[][][] { new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("eee") }, new byte[][] { Bytes.toBytes("fff"), Bytes.toBytes("zzz") }, }); } /** * Test case that creates some regions and loads HFiles that have different region boundaries than * the table pre-split. 
*/ @Test public void testSimpleHFileSplit() throws Exception { runTest("testHFileSplit", BloomType.NONE, new byte[][] { Bytes.toBytes("aaa"), Bytes.toBytes("fff"), Bytes.toBytes("jjj"), Bytes.toBytes("ppp"), Bytes.toBytes("uuu"), Bytes.toBytes("zzz"), }, new byte[][][] { new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("lll") }, new byte[][] { Bytes.toBytes("mmm"), Bytes.toBytes("zzz") }, }); } /** * Test case that creates some regions and loads HFiles that cross the boundaries and have * different region boundaries than the table pre-split. */ @Test public void testRegionCrossingHFileSplit() throws Exception { testRegionCrossingHFileSplit(BloomType.NONE); } /** * Test case that creates some regions and loads HFiles that cross the boundaries have a ROW bloom * filter and a different region boundaries than the table pre-split. */ @Test public void testRegionCrossingHFileSplitRowBloom() throws Exception { testRegionCrossingHFileSplit(BloomType.ROW); } /** * Test case that creates some regions and loads HFiles that cross the boundaries have a ROWCOL * bloom filter and a different region boundaries than the table pre-split. 
*/ @Test public void testRegionCrossingHFileSplitRowColBloom() throws Exception { testRegionCrossingHFileSplit(BloomType.ROWCOL); } @Test public void testSplitALot() throws Exception { runTest("testSplitALot", BloomType.NONE, new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("bbb"), Bytes.toBytes("ccc"), Bytes.toBytes("ddd"), Bytes.toBytes("eee"), Bytes.toBytes("fff"), Bytes.toBytes("ggg"), Bytes.toBytes("hhh"), Bytes.toBytes("iii"), Bytes.toBytes("lll"), Bytes.toBytes("mmm"), Bytes.toBytes("nnn"), Bytes.toBytes("ooo"), Bytes.toBytes("ppp"), Bytes.toBytes("qqq"), Bytes.toBytes("rrr"), Bytes.toBytes("sss"), Bytes.toBytes("ttt"), Bytes.toBytes("uuu"), Bytes.toBytes("vvv"), Bytes.toBytes("zzz"), }, new byte[][][] { new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("zzz") }, }); } private void testRegionCrossingHFileSplit(BloomType bloomType) throws Exception { runTest("testHFileSplit" + bloomType + "Bloom", bloomType, new byte[][] { Bytes.toBytes("aaa"), Bytes.toBytes("fff"), Bytes.toBytes("jjj"), Bytes.toBytes("ppp"), Bytes.toBytes("uuu"), Bytes.toBytes("zzz"), }, new byte[][][] { new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("eee") }, new byte[][] { Bytes.toBytes("fff"), Bytes.toBytes("zzz") }, }); } private TableDescriptor buildHTD(TableName tableName, BloomType bloomType) { return TableDescriptorBuilder.newBuilder(tableName) .setColumnFamily( ColumnFamilyDescriptorBuilder.newBuilder(FAMILY).setBloomFilterType(bloomType).build()) .build(); } private void runTest(String testName, BloomType bloomType, byte[][][] hfileRanges) throws Exception { runTest(testName, bloomType, null, hfileRanges); } private void runTest(String testName, BloomType bloomType, byte[][][] hfileRanges, boolean useMap) throws Exception { runTest(testName, bloomType, null, hfileRanges, useMap); } private void runTest(String testName, BloomType bloomType, byte[][] tableSplitKeys, byte[][][] hfileRanges) throws Exception { runTest(testName, bloomType, tableSplitKeys, hfileRanges, false); 
} private void runTest(String testName, BloomType bloomType, byte[][] tableSplitKeys, byte[][][] hfileRanges, boolean useMap) throws Exception { final byte[] TABLE_NAME = Bytes.toBytes("mytable_" + testName); final boolean preCreateTable = tableSplitKeys != null; // Run the test bulkloading the table to the default namespace final TableName TABLE_WITHOUT_NS = TableName.valueOf(TABLE_NAME); runTest(testName, TABLE_WITHOUT_NS, bloomType, preCreateTable, tableSplitKeys, hfileRanges, useMap, 2); /* * Run the test bulkloading the table from a depth of 3 directory structure is now baseDirectory * -- regionDir -- familyDir -- storeFileDir */ if (preCreateTable) { runTest(testName + 2, TABLE_WITHOUT_NS, bloomType, true, tableSplitKeys, hfileRanges, false, 3); } // Run the test bulkloading the table to the specified namespace final TableName TABLE_WITH_NS = TableName.valueOf(Bytes.toBytes(NAMESPACE), TABLE_NAME); runTest(testName, TABLE_WITH_NS, bloomType, preCreateTable, tableSplitKeys, hfileRanges, useMap, 2); } private void runTest(String testName, TableName tableName, BloomType bloomType, boolean preCreateTable, byte[][] tableSplitKeys, byte[][][] hfileRanges, boolean useMap, int depth) throws Exception { TableDescriptor htd = buildHTD(tableName, bloomType); runTest(testName, htd, preCreateTable, tableSplitKeys, hfileRanges, useMap, false, depth); } public static int loadHFiles(String testName, TableDescriptor htd, HBaseTestingUtil util, byte[] fam, byte[] qual, boolean preCreateTable, byte[][] tableSplitKeys, byte[][][] hfileRanges, boolean useMap, boolean deleteFile, boolean copyFiles, int initRowCount, int factor, int depth) throws Exception { Path baseDirectory = util.getDataTestDirOnTestFS(testName); FileSystem fs = util.getTestFileSystem(); baseDirectory = baseDirectory.makeQualified(fs.getUri(), fs.getWorkingDirectory()); Path parentDir = baseDirectory; if (depth == 3) { assert !useMap; parentDir = new Path(baseDirectory, "someRegion"); } Path familyDir = new 
Path(parentDir, Bytes.toString(fam)); int hfileIdx = 0; Map<byte[], List<Path>> map = null; List<Path> list = null; if (useMap || copyFiles) { list = new ArrayList<>(); } if (useMap) { map = new TreeMap<>(Bytes.BYTES_COMPARATOR); map.put(fam, list); } Path last = null; for (byte[][] range : hfileRanges) { byte[] from = range[0]; byte[] to = range[1]; Path path = new Path(familyDir, "hfile_" + hfileIdx++); HFileTestUtil.createHFile(util.getConfiguration(), fs, path, fam, qual, from, to, factor); if (useMap) { last = path; list.add(path); } } int expectedRows = hfileIdx * factor; TableName tableName = htd.getTableName(); if (!util.getAdmin().tableExists(tableName) && (preCreateTable || map != null)) { if (tableSplitKeys != null) { util.getAdmin().createTable(htd, tableSplitKeys); } else { util.getAdmin().createTable(htd); } } Configuration conf = util.getConfiguration(); if (copyFiles) { conf.setBoolean(BulkLoadHFiles.ALWAYS_COPY_FILES, true); } BulkLoadHFilesTool loader = new BulkLoadHFilesTool(conf); List<String> args = Lists.newArrayList(baseDirectory.toString(), tableName.toString()); if (depth == 3) { args.add("-loadTable"); } if (useMap) { if (deleteFile) { fs.delete(last, true); } Map<BulkLoadHFiles.LoadQueueItem, ByteBuffer> loaded = loader.bulkLoad(tableName, map); if (deleteFile) { expectedRows -= 1000; for (BulkLoadHFiles.LoadQueueItem item : loaded.keySet()) { if (item.getFilePath().getName().equals(last.getName())) { fail(last + " should be missing"); } } } } else { loader.run(args.toArray(new String[] {})); } if (copyFiles) { for (Path p : list) { assertTrue(p + " should exist", fs.exists(p)); } } try (Table table = util.getConnection().getTable(tableName)) { assertEquals(initRowCount + expectedRows, countRows(table)); } return expectedRows; } private void runTest(String testName, TableDescriptor htd, boolean preCreateTable, byte[][] tableSplitKeys, byte[][][] hfileRanges, boolean useMap, boolean copyFiles, int depth) throws Exception { 
loadHFiles(testName, htd, util, FAMILY, QUALIFIER, preCreateTable, tableSplitKeys, hfileRanges, useMap, true, copyFiles, 0, 1000, depth); final TableName tableName = htd.getTableName(); // verify staging folder has been cleaned up Path stagingBasePath = new Path(CommonFSUtils.getRootDir(util.getConfiguration()), HConstants.BULKLOAD_STAGING_DIR_NAME); FileSystem fs = util.getTestFileSystem(); if (fs.exists(stagingBasePath)) { FileStatus[] files = fs.listStatus(stagingBasePath); for (FileStatus file : files) { assertTrue("Folder=" + file.getPath() + " is not cleaned up.", file.getPath().getName() != "DONOTERASE"); } } util.deleteTable(tableName); } /** * Test that tags survive through a bulk load that needs to split hfiles. This test depends on the * "hbase.client.rpc.codec" = KeyValueCodecWithTags so that the client can get tags in the * responses. */ @Test public void testTagsSurviveBulkLoadSplit() throws Exception { Path dir = util.getDataTestDirOnTestFS(tn.getMethodName()); FileSystem fs = util.getTestFileSystem(); dir = dir.makeQualified(fs.getUri(), fs.getWorkingDirectory()); Path familyDir = new Path(dir, Bytes.toString(FAMILY)); // table has these split points byte[][] tableSplitKeys = new byte[][] { Bytes.toBytes("aaa"), Bytes.toBytes("fff"), Bytes.toBytes("jjj"), Bytes.toBytes("ppp"), Bytes.toBytes("uuu"), Bytes.toBytes("zzz"), }; // creating an hfile that has values that span the split points. 
byte[] from = Bytes.toBytes("ddd"); byte[] to = Bytes.toBytes("ooo"); HFileTestUtil.createHFileWithTags(util.getConfiguration(), fs, new Path(familyDir, tn.getMethodName() + "_hfile"), FAMILY, QUALIFIER, from, to, 1000); int expectedRows = 1000; TableName tableName = TableName.valueOf(tn.getMethodName()); TableDescriptor htd = buildHTD(tableName, BloomType.NONE); util.getAdmin().createTable(htd, tableSplitKeys); BulkLoadHFiles.create(util.getConfiguration()).bulkLoad(tableName, dir); Table table = util.getConnection().getTable(tableName); try { assertEquals(expectedRows, countRows(table)); HFileTestUtil.verifyTags(table); } finally { table.close(); } util.deleteTable(tableName); } /** * Test loading into a column family that does not exist. */ @Test public void testNonexistentColumnFamilyLoad() throws Exception { String testName = tn.getMethodName(); byte[][][] hFileRanges = new byte[][][] { new byte[][] { Bytes.toBytes("aaa"), Bytes.toBytes("ccc") }, new byte[][] { Bytes.toBytes("ddd"), Bytes.toBytes("ooo") }, }; byte[] TABLE = Bytes.toBytes("mytable_" + testName); // set real family name to upper case in purpose to simulate the case that // family name in HFiles is invalid TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(TABLE)) .setColumnFamily(ColumnFamilyDescriptorBuilder .of(Bytes.toBytes(new String(FAMILY).toUpperCase(Locale.ROOT)))) .build(); try { runTest(testName, htd, true, SPLIT_KEYS, hFileRanges, false, false, 2); assertTrue("Loading into table with non-existent family should have failed", false); } catch (Exception e) { assertTrue("IOException expected", e instanceof IOException); // further check whether the exception message is correct String errMsg = e.getMessage(); assertTrue( "Incorrect exception message, expected message: [" + EXPECTED_MSG_FOR_NON_EXISTING_FAMILY + "], current message: [" + errMsg + "]", errMsg.contains(EXPECTED_MSG_FOR_NON_EXISTING_FAMILY)); } } @Test public void 
testNonHfileFolderWithUnmatchedFamilyName() throws Exception { testNonHfileFolder("testNonHfileFolderWithUnmatchedFamilyName", true); } @Test public void testNonHfileFolder() throws Exception { testNonHfileFolder("testNonHfileFolder", false); } /** * Write a random data file and a non-file in a dir with a valid family name but not part of the * table families. we should we able to bulkload without getting the unmatched family exception. * HBASE-13037/HBASE-13227 */ private void testNonHfileFolder(String tableName, boolean preCreateTable) throws Exception { Path dir = util.getDataTestDirOnTestFS(tableName); FileSystem fs = util.getTestFileSystem(); dir = dir.makeQualified(fs.getUri(), fs.getWorkingDirectory()); Path familyDir = new Path(dir, Bytes.toString(FAMILY)); HFileTestUtil.createHFile(util.getConfiguration(), fs, new Path(familyDir, "hfile_0"), FAMILY, QUALIFIER, Bytes.toBytes("begin"), Bytes.toBytes("end"), 500); createRandomDataFile(fs, new Path(familyDir, "012356789"), 16 * 1024); final String NON_FAMILY_FOLDER = "_logs"; Path nonFamilyDir = new Path(dir, NON_FAMILY_FOLDER); fs.mkdirs(nonFamilyDir); fs.mkdirs(new Path(nonFamilyDir, "non-file")); createRandomDataFile(fs, new Path(nonFamilyDir, "012356789"), 16 * 1024); Table table = null; try { if (preCreateTable) { table = util.createTable(TableName.valueOf(tableName), FAMILY); } else { table = util.getConnection().getTable(TableName.valueOf(tableName)); } BulkLoadHFiles.create(util.getConfiguration()).bulkLoad(TableName.valueOf(tableName), dir); assertEquals(500, countRows(table)); } finally { if (table != null) { table.close(); } fs.delete(dir, true); } } private static void createRandomDataFile(FileSystem fs, Path path, int size) throws IOException { FSDataOutputStream stream = fs.create(path); try { byte[] data = new byte[1024]; for (int i = 0; i < data.length; ++i) { data[i] = (byte) (i & 0xff); } while (size >= data.length) { stream.write(data, 0, data.length); size -= data.length; } if (size > 0) { 
stream.write(data, 0, size); } } finally { stream.close(); } } @Test public void testSplitStoreFile() throws IOException { Path dir = util.getDataTestDirOnTestFS("testSplitHFile"); FileSystem fs = util.getTestFileSystem(); Path testIn = new Path(dir, "testhfile"); ColumnFamilyDescriptor familyDesc = ColumnFamilyDescriptorBuilder.of(FAMILY); String tableName = tn.getMethodName(); util.createTable(TableName.valueOf(tableName), familyDesc.getNameAsString()); HFileTestUtil.createHFile(util.getConfiguration(), fs, testIn, FAMILY, QUALIFIER, Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000); Path bottomOut = new Path(dir, "bottom.out"); Path topOut = new Path(dir, "top.out"); BulkLoadHFilesTool.splitStoreFile( util.getAsyncConnection().getRegionLocator(TableName.valueOf(tableName)), util.getConfiguration(), testIn, familyDesc, Bytes.toBytes("ggg"), bottomOut, topOut); int rowCount = verifyHFile(bottomOut); rowCount += verifyHFile(topOut); assertEquals(1000, rowCount); } /** * Test hfile splits with the favored nodes */ @Test public void testSplitStoreFileWithFavoriteNodes() throws IOException { Path dir = new Path(util.getDefaultRootDirPath(), "testhfile"); FileSystem fs = util.getDFSCluster().getFileSystem(); Path testIn = new Path(dir, "testSplitStoreFileWithFavoriteNodes"); ColumnFamilyDescriptor familyDesc = ColumnFamilyDescriptorBuilder.of(FAMILY); String tableName = tn.getMethodName(); Table table = util.createTable(TableName.valueOf(tableName), familyDesc.getNameAsString()); HFileTestUtil.createHFile(util.getConfiguration(), fs, testIn, FAMILY, QUALIFIER, Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000); Path bottomOut = new Path(dir, "bottom.out"); Path topOut = new Path(dir, "top.out"); final AsyncTableRegionLocator regionLocator = util.getAsyncConnection().getRegionLocator(TableName.valueOf(tableName)); BulkLoadHFilesTool.splitStoreFile(regionLocator, util.getConfiguration(), testIn, familyDesc, Bytes.toBytes("ggg"), bottomOut, topOut); 
verifyHFileFavoriteNode(topOut, regionLocator, fs); verifyHFileFavoriteNode(bottomOut, regionLocator, fs); int rowCount = verifyHFile(bottomOut); rowCount += verifyHFile(topOut); assertEquals(1000, rowCount); } @Test public void testSplitStoreFileWithCreateTimeTS() throws IOException { Path dir = util.getDataTestDirOnTestFS("testSplitStoreFileWithCreateTimeTS"); FileSystem fs = util.getTestFileSystem(); Path testIn = new Path(dir, "testhfile"); ColumnFamilyDescriptor familyDesc = ColumnFamilyDescriptorBuilder.of(FAMILY); String tableName = tn.getMethodName(); util.createTable(TableName.valueOf(tableName), familyDesc.getNameAsString()); HFileTestUtil.createHFile(util.getConfiguration(), fs, testIn, FAMILY, QUALIFIER, Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000); Path bottomOut = new Path(dir, "bottom.out"); Path topOut = new Path(dir, "top.out"); BulkLoadHFilesTool.splitStoreFile( util.getAsyncConnection().getRegionLocator(TableName.valueOf(tableName)), util.getConfiguration(), testIn, familyDesc, Bytes.toBytes("ggg"), bottomOut, topOut); verifyHFileCreateTimeTS(bottomOut); verifyHFileCreateTimeTS(topOut); } @Test public void testSplitStoreFileWithNoneToNone() throws IOException { testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.NONE, DataBlockEncoding.NONE); } @Test public void testSplitStoreFileWithEncodedToEncoded() throws IOException { testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.DIFF, DataBlockEncoding.DIFF); } @Test public void testSplitStoreFileWithEncodedToNone() throws IOException { testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.DIFF, DataBlockEncoding.NONE); } @Test public void testSplitStoreFileWithNoneToEncoded() throws IOException { testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.NONE, DataBlockEncoding.DIFF); } private void testSplitStoreFileWithDifferentEncoding(DataBlockEncoding bulkloadEncoding, DataBlockEncoding cfEncoding) throws IOException { Path dir = 
util.getDataTestDirOnTestFS("testSplitHFileWithDifferentEncoding"); FileSystem fs = util.getTestFileSystem(); Path testIn = new Path(dir, "testhfile"); ColumnFamilyDescriptor familyDesc = ColumnFamilyDescriptorBuilder.newBuilder(FAMILY).setDataBlockEncoding(cfEncoding).build(); String tableName = tn.getMethodName(); util.createTable(TableName.valueOf(tableName), familyDesc.getNameAsString()); HFileTestUtil.createHFileWithDataBlockEncoding(util.getConfiguration(), fs, testIn, bulkloadEncoding, FAMILY, QUALIFIER, Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000); Path bottomOut = new Path(dir, "bottom.out"); Path topOut = new Path(dir, "top.out"); BulkLoadHFilesTool.splitStoreFile( util.getAsyncConnection().getRegionLocator(TableName.valueOf(tableName)), util.getConfiguration(), testIn, familyDesc, Bytes.toBytes("ggg"), bottomOut, topOut); int rowCount = verifyHFile(bottomOut); rowCount += verifyHFile(topOut); assertEquals(1000, rowCount); } private int verifyHFile(Path p) throws IOException { Configuration conf = util.getConfiguration(); HFile.Reader reader = HFile.createReader(p.getFileSystem(conf), p, new CacheConfig(conf), true, conf); HFileScanner scanner = reader.getScanner(conf, false, false); scanner.seekTo(); int count = 0; do { count++; } while (scanner.next()); assertTrue(count > 0); reader.close(); return count; } private void verifyHFileCreateTimeTS(Path p) throws IOException { Configuration conf = util.getConfiguration(); try (HFile.Reader reader = HFile.createReader(p.getFileSystem(conf), p, new CacheConfig(conf), true, conf)) { long fileCreateTime = reader.getHFileInfo().getHFileContext().getFileCreateTime(); MatcherAssert.assertThat(fileCreateTime, greaterThan(0L)); } } /** * test split storefile with favorite node information */ private void verifyHFileFavoriteNode(Path p, AsyncTableRegionLocator regionLocator, FileSystem fs) throws IOException { Configuration conf = util.getConfiguration(); try (HFile.Reader reader = HFile.createReader(fs, p, new 
CacheConfig(conf), true, conf);) { final byte[] firstRowkey = reader.getFirstRowKey().get(); final HRegionLocation hRegionLocation = FutureUtils.get(regionLocator.getRegionLocation(firstRowkey)); final String targetHostName = hRegionLocation.getHostname(); if (fs instanceof DistributedFileSystem) { String pathStr = p.toUri().getPath(); LocatedBlocks blocks = ((DistributedFileSystem) fs).getClient().getLocatedBlocks(pathStr, 0L); boolean isFavoriteNode = false; List<LocatedBlock> locatedBlocks = blocks.getLocatedBlocks(); int index = 0; do { if (index > 0) { assertTrue("failed use favored nodes", isFavoriteNode); } isFavoriteNode = false; final LocatedBlock block = locatedBlocks.get(index); final DatanodeInfo[] locations = getLocatedBlockLocations(block); for (DatanodeInfo location : locations) { final String hostName = location.getHostName(); if ( targetHostName.equals(hostName.equals("127.0.0.1") ? InetAddress.getLocalHost().getHostName() : "127.0.0.1") || targetHostName.equals(hostName) ) { isFavoriteNode = true; break; } } index++; } while (index < locatedBlocks.size()); if (index > 0) { assertTrue("failed use favored nodes", isFavoriteNode); } } } } private void addStartEndKeysForTest(TreeMap<byte[], Integer> map, byte[] first, byte[] last) { Integer value = map.containsKey(first) ? map.get(first) : 0; map.put(first, value + 1); value = map.containsKey(last) ? 
map.get(last) : 0; map.put(last, value - 1); } @Test public void testInferBoundaries() { TreeMap<byte[], Integer> map = new TreeMap<>(Bytes.BYTES_COMPARATOR); /* * Toy example c---------i o------p s---------t v------x a------e g-----k m-------------q r----s * u----w Should be inferred as: a-----------------k m-------------q r--------------t * u---------x The output should be (m,r,u) */ String first; String last; first = "a"; last = "e"; addStartEndKeysForTest(map, Bytes.toBytes(first), Bytes.toBytes(last)); first = "r"; last = "s"; addStartEndKeysForTest(map, Bytes.toBytes(first), Bytes.toBytes(last)); first = "o"; last = "p"; addStartEndKeysForTest(map, Bytes.toBytes(first), Bytes.toBytes(last)); first = "g"; last = "k"; addStartEndKeysForTest(map, Bytes.toBytes(first), Bytes.toBytes(last)); first = "v"; last = "x"; addStartEndKeysForTest(map, Bytes.toBytes(first), Bytes.toBytes(last)); first = "c"; last = "i"; addStartEndKeysForTest(map, Bytes.toBytes(first), Bytes.toBytes(last)); first = "m"; last = "q"; addStartEndKeysForTest(map, Bytes.toBytes(first), Bytes.toBytes(last)); first = "s"; last = "t"; addStartEndKeysForTest(map, Bytes.toBytes(first), Bytes.toBytes(last)); first = "u"; last = "w"; addStartEndKeysForTest(map, Bytes.toBytes(first), Bytes.toBytes(last)); byte[][] keysArray = BulkLoadHFilesTool.inferBoundaries(map); byte[][] compare = new byte[3][]; compare[0] = Bytes.toBytes("m"); compare[1] = Bytes.toBytes("r"); compare[2] = Bytes.toBytes("u"); assertEquals(3, keysArray.length); for (int row = 0; row < keysArray.length; row++) { assertArrayEquals(keysArray[row], compare[row]); } } @Test public void testLoadTooMayHFiles() throws Exception { Path dir = util.getDataTestDirOnTestFS("testLoadTooMayHFiles"); FileSystem fs = util.getTestFileSystem(); dir = dir.makeQualified(fs.getUri(), fs.getWorkingDirectory()); Path familyDir = new Path(dir, Bytes.toString(FAMILY)); byte[] from = Bytes.toBytes("begin"); byte[] to = Bytes.toBytes("end"); for (int i = 0; i 
<= MAX_FILES_PER_REGION_PER_FAMILY; i++) { HFileTestUtil.createHFile(util.getConfiguration(), fs, new Path(familyDir, "hfile_" + i), FAMILY, QUALIFIER, from, to, 1000); } try { BulkLoadHFiles.create(util.getConfiguration()) .bulkLoad(TableName.valueOf("mytable_testLoadTooMayHFiles"), dir); fail("Bulk loading too many files should fail"); } catch (IOException ie) { assertTrue(ie.getMessage() .contains("Trying to load more than " + MAX_FILES_PER_REGION_PER_FAMILY + " hfiles")); } } @Test(expected = TableNotFoundException.class) public void testWithoutAnExistingTableAndCreateTableSetToNo() throws Exception { Configuration conf = util.getConfiguration(); conf.set(BulkLoadHFiles.CREATE_TABLE_CONF_KEY, "no"); BulkLoadHFilesTool loader = new BulkLoadHFilesTool(conf); String[] args = { "directory", "nonExistingTable" }; loader.run(args); } @Test public void testTableWithCFNameStartWithUnderScore() throws Exception { Path dir = util.getDataTestDirOnTestFS("cfNameStartWithUnderScore"); FileSystem fs = util.getTestFileSystem(); dir = dir.makeQualified(fs.getUri(), fs.getWorkingDirectory()); String family = "_cf"; Path familyDir = new Path(dir, family); byte[] from = Bytes.toBytes("begin"); byte[] to = Bytes.toBytes("end"); Configuration conf = util.getConfiguration(); String tableName = tn.getMethodName(); try (Table table = util.createTable(TableName.valueOf(tableName), family)) { HFileTestUtil.createHFile(conf, fs, new Path(familyDir, "hfile"), Bytes.toBytes(family), QUALIFIER, from, to, 1000); BulkLoadHFiles.create(conf).bulkLoad(table.getName(), dir); assertEquals(1000, countRows(table)); } } @Test public void testBulkLoadByFamily() throws Exception { Path dir = util.getDataTestDirOnTestFS("testBulkLoadByFamily"); FileSystem fs = util.getTestFileSystem(); dir = dir.makeQualified(fs.getUri(), fs.getWorkingDirectory()); String tableName = tn.getMethodName(); String[] families = { "cf1", "cf2", "cf3" }; for (int i = 0; i < families.length; i++) { byte[] from = 
Bytes.toBytes(i + "begin"); byte[] to = Bytes.toBytes(i + "end"); Path familyDir = new Path(dir, families[i]); HFileTestUtil.createHFile(util.getConfiguration(), fs, new Path(familyDir, "hfile"), Bytes.toBytes(families[i]), QUALIFIER, from, to, 1000); } Table table = util.createTable(TableName.valueOf(tableName), families); final AtomicInteger attmptedCalls = new AtomicInteger(); util.getConfiguration().setBoolean(BulkLoadHFilesTool.BULK_LOAD_HFILES_BY_FAMILY, true); BulkLoadHFiles loader = new BulkLoadHFilesTool(util.getConfiguration()) { @Override protected CompletableFuture<Collection<LoadQueueItem>> tryAtomicRegionLoad( final AsyncClusterConnection conn, final TableName tableName, boolean copyFiles, final byte[] first, Collection<LoadQueueItem> lqis) { attmptedCalls.incrementAndGet(); return super.tryAtomicRegionLoad(conn, tableName, copyFiles, first, lqis); } }; try { loader.bulkLoad(table.getName(), dir); assertEquals(families.length, attmptedCalls.get()); assertEquals(1000 * families.length, HBaseTestingUtil.countRows(table)); } finally { if (null != table) { table.close(); } util.getConfiguration().setBoolean(BulkLoadHFilesTool.BULK_LOAD_HFILES_BY_FAMILY, false); } } @Test public void testFailIfNeedSplitHFile() throws IOException { TableName tableName = TableName.valueOf(tn.getMethodName()); Table table = util.createTable(tableName, FAMILY); util.loadTable(table, FAMILY); FileSystem fs = util.getTestFileSystem(); Path sfPath = new Path(fs.getWorkingDirectory(), new Path(Bytes.toString(FAMILY), "file")); HFileTestUtil.createHFile(util.getConfiguration(), fs, sfPath, FAMILY, QUALIFIER, Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000); util.getAdmin().split(tableName); util.waitFor(10000, 1000, () -> util.getAdmin().getRegions(tableName).size() > 1); Configuration config = new Configuration(util.getConfiguration()); config.setBoolean(BulkLoadHFilesTool.FAIL_IF_NEED_SPLIT_HFILE, true); BulkLoadHFilesTool tool = new BulkLoadHFilesTool(config); String[] args = 
new String[] { fs.getWorkingDirectory().toString(), tableName.toString() }; assertThrows(IOException.class, () -> tool.run(args)); util.getHBaseCluster().getRegions(tableName) .forEach(r -> assertEquals(1, r.getStore(FAMILY).getStorefiles().size())); } }
googleapis/google-cloud-java
37,051
java-analytics-admin/proto-google-analytics-admin-v1alpha/src/main/java/com/google/analytics/admin/v1alpha/PropertySummary.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/analytics/admin/v1alpha/resources.proto // Protobuf Java Version: 3.25.8 package com.google.analytics.admin.v1alpha; /** * * * <pre> * A virtual resource representing metadata for a Google Analytics property. * </pre> * * Protobuf type {@code google.analytics.admin.v1alpha.PropertySummary} */ public final class PropertySummary extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.analytics.admin.v1alpha.PropertySummary) PropertySummaryOrBuilder { private static final long serialVersionUID = 0L; // Use PropertySummary.newBuilder() to construct. 
private PropertySummary(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private PropertySummary() { property_ = ""; displayName_ = ""; propertyType_ = 0; parent_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new PropertySummary(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.analytics.admin.v1alpha.ResourcesProto .internal_static_google_analytics_admin_v1alpha_PropertySummary_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.analytics.admin.v1alpha.ResourcesProto .internal_static_google_analytics_admin_v1alpha_PropertySummary_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.analytics.admin.v1alpha.PropertySummary.class, com.google.analytics.admin.v1alpha.PropertySummary.Builder.class); } public static final int PROPERTY_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object property_ = ""; /** * * * <pre> * Resource name of property referred to by this property summary * Format: properties/{property_id} * Example: "properties/1000" * </pre> * * <code>string property = 1 [(.google.api.resource_reference) = { ... }</code> * * @return The property. */ @java.lang.Override public java.lang.String getProperty() { java.lang.Object ref = property_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); property_ = s; return s; } } /** * * * <pre> * Resource name of property referred to by this property summary * Format: properties/{property_id} * Example: "properties/1000" * </pre> * * <code>string property = 1 [(.google.api.resource_reference) = { ... }</code> * * @return The bytes for property. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPropertyBytes() { java.lang.Object ref = property_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); property_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int DISPLAY_NAME_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object displayName_ = ""; /** * * * <pre> * Display name for the property referred to in this property summary. * </pre> * * <code>string display_name = 2;</code> * * @return The displayName. */ @java.lang.Override public java.lang.String getDisplayName() { java.lang.Object ref = displayName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); displayName_ = s; return s; } } /** * * * <pre> * Display name for the property referred to in this property summary. * </pre> * * <code>string display_name = 2;</code> * * @return The bytes for displayName. */ @java.lang.Override public com.google.protobuf.ByteString getDisplayNameBytes() { java.lang.Object ref = displayName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); displayName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PROPERTY_TYPE_FIELD_NUMBER = 3; private int propertyType_ = 0; /** * * * <pre> * The property's property type. * </pre> * * <code>.google.analytics.admin.v1alpha.PropertyType property_type = 3;</code> * * @return The enum numeric value on the wire for propertyType. */ @java.lang.Override public int getPropertyTypeValue() { return propertyType_; } /** * * * <pre> * The property's property type. 
* </pre> * * <code>.google.analytics.admin.v1alpha.PropertyType property_type = 3;</code> * * @return The propertyType. */ @java.lang.Override public com.google.analytics.admin.v1alpha.PropertyType getPropertyType() { com.google.analytics.admin.v1alpha.PropertyType result = com.google.analytics.admin.v1alpha.PropertyType.forNumber(propertyType_); return result == null ? com.google.analytics.admin.v1alpha.PropertyType.UNRECOGNIZED : result; } public static final int PARENT_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Resource name of this property's logical parent. * * Note: The Property-Moving UI can be used to change the parent. * Format: accounts/{account}, properties/{property} * Example: "accounts/100", "properties/200" * </pre> * * <code>string parent = 4;</code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Resource name of this property's logical parent. * * Note: The Property-Moving UI can be used to change the parent. * Format: accounts/{account}, properties/{property} * Example: "accounts/100", "properties/200" * </pre> * * <code>string parent = 4;</code> * * @return The bytes for parent. 
*/ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(property_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, property_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(displayName_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, displayName_); } if (propertyType_ != com.google.analytics.admin.v1alpha.PropertyType.PROPERTY_TYPE_UNSPECIFIED.getNumber()) { output.writeEnum(3, propertyType_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, parent_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(property_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, property_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(displayName_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, displayName_); } if (propertyType_ != com.google.analytics.admin.v1alpha.PropertyType.PROPERTY_TYPE_UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(3, propertyType_); } if 
(!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, parent_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.analytics.admin.v1alpha.PropertySummary)) { return super.equals(obj); } com.google.analytics.admin.v1alpha.PropertySummary other = (com.google.analytics.admin.v1alpha.PropertySummary) obj; if (!getProperty().equals(other.getProperty())) return false; if (!getDisplayName().equals(other.getDisplayName())) return false; if (propertyType_ != other.propertyType_) return false; if (!getParent().equals(other.getParent())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PROPERTY_FIELD_NUMBER; hash = (53 * hash) + getProperty().hashCode(); hash = (37 * hash) + DISPLAY_NAME_FIELD_NUMBER; hash = (53 * hash) + getDisplayName().hashCode(); hash = (37 * hash) + PROPERTY_TYPE_FIELD_NUMBER; hash = (53 * hash) + propertyType_; hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.analytics.admin.v1alpha.PropertySummary parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.analytics.admin.v1alpha.PropertySummary parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.analytics.admin.v1alpha.PropertySummary parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.analytics.admin.v1alpha.PropertySummary parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.analytics.admin.v1alpha.PropertySummary parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.analytics.admin.v1alpha.PropertySummary parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.analytics.admin.v1alpha.PropertySummary parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.analytics.admin.v1alpha.PropertySummary parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.analytics.admin.v1alpha.PropertySummary parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.analytics.admin.v1alpha.PropertySummary parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public 
static com.google.analytics.admin.v1alpha.PropertySummary parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.analytics.admin.v1alpha.PropertySummary parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.analytics.admin.v1alpha.PropertySummary prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * A virtual resource representing metadata for a Google Analytics property. 
* </pre> * * Protobuf type {@code google.analytics.admin.v1alpha.PropertySummary} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.analytics.admin.v1alpha.PropertySummary) com.google.analytics.admin.v1alpha.PropertySummaryOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.analytics.admin.v1alpha.ResourcesProto .internal_static_google_analytics_admin_v1alpha_PropertySummary_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.analytics.admin.v1alpha.ResourcesProto .internal_static_google_analytics_admin_v1alpha_PropertySummary_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.analytics.admin.v1alpha.PropertySummary.class, com.google.analytics.admin.v1alpha.PropertySummary.Builder.class); } // Construct using com.google.analytics.admin.v1alpha.PropertySummary.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; property_ = ""; displayName_ = ""; propertyType_ = 0; parent_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.analytics.admin.v1alpha.ResourcesProto .internal_static_google_analytics_admin_v1alpha_PropertySummary_descriptor; } @java.lang.Override public com.google.analytics.admin.v1alpha.PropertySummary getDefaultInstanceForType() { return com.google.analytics.admin.v1alpha.PropertySummary.getDefaultInstance(); } @java.lang.Override public com.google.analytics.admin.v1alpha.PropertySummary build() { com.google.analytics.admin.v1alpha.PropertySummary result = buildPartial(); if (!result.isInitialized()) { throw 
newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.analytics.admin.v1alpha.PropertySummary buildPartial() { com.google.analytics.admin.v1alpha.PropertySummary result = new com.google.analytics.admin.v1alpha.PropertySummary(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.analytics.admin.v1alpha.PropertySummary result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.property_ = property_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.displayName_ = displayName_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.propertyType_ = propertyType_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.parent_ = parent_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.analytics.admin.v1alpha.PropertySummary) { return mergeFrom((com.google.analytics.admin.v1alpha.PropertySummary) other); } else { super.mergeFrom(other); return this; } } public Builder 
mergeFrom(com.google.analytics.admin.v1alpha.PropertySummary other) { if (other == com.google.analytics.admin.v1alpha.PropertySummary.getDefaultInstance()) return this; if (!other.getProperty().isEmpty()) { property_ = other.property_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getDisplayName().isEmpty()) { displayName_ = other.displayName_; bitField0_ |= 0x00000002; onChanged(); } if (other.propertyType_ != 0) { setPropertyTypeValue(other.getPropertyTypeValue()); } if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { property_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { displayName_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 24: { propertyType_ = input.readEnum(); bitField0_ |= 0x00000004; break; } // case 24 case 34: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object property_ = ""; /** * * * <pre> * Resource name of property referred to by this property summary * Format: properties/{property_id} * 
Example: "properties/1000" * </pre> * * <code>string property = 1 [(.google.api.resource_reference) = { ... }</code> * * @return The property. */ public java.lang.String getProperty() { java.lang.Object ref = property_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); property_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Resource name of property referred to by this property summary * Format: properties/{property_id} * Example: "properties/1000" * </pre> * * <code>string property = 1 [(.google.api.resource_reference) = { ... }</code> * * @return The bytes for property. */ public com.google.protobuf.ByteString getPropertyBytes() { java.lang.Object ref = property_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); property_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Resource name of property referred to by this property summary * Format: properties/{property_id} * Example: "properties/1000" * </pre> * * <code>string property = 1 [(.google.api.resource_reference) = { ... }</code> * * @param value The property to set. * @return This builder for chaining. */ public Builder setProperty(java.lang.String value) { if (value == null) { throw new NullPointerException(); } property_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Resource name of property referred to by this property summary * Format: properties/{property_id} * Example: "properties/1000" * </pre> * * <code>string property = 1 [(.google.api.resource_reference) = { ... }</code> * * @return This builder for chaining. 
*/ public Builder clearProperty() { property_ = getDefaultInstance().getProperty(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Resource name of property referred to by this property summary * Format: properties/{property_id} * Example: "properties/1000" * </pre> * * <code>string property = 1 [(.google.api.resource_reference) = { ... }</code> * * @param value The bytes for property to set. * @return This builder for chaining. */ public Builder setPropertyBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); property_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object displayName_ = ""; /** * * * <pre> * Display name for the property referred to in this property summary. * </pre> * * <code>string display_name = 2;</code> * * @return The displayName. */ public java.lang.String getDisplayName() { java.lang.Object ref = displayName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); displayName_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Display name for the property referred to in this property summary. * </pre> * * <code>string display_name = 2;</code> * * @return The bytes for displayName. */ public com.google.protobuf.ByteString getDisplayNameBytes() { java.lang.Object ref = displayName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); displayName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Display name for the property referred to in this property summary. * </pre> * * <code>string display_name = 2;</code> * * @param value The displayName to set. * @return This builder for chaining. 
*/ public Builder setDisplayName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } displayName_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Display name for the property referred to in this property summary. * </pre> * * <code>string display_name = 2;</code> * * @return This builder for chaining. */ public Builder clearDisplayName() { displayName_ = getDefaultInstance().getDisplayName(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Display name for the property referred to in this property summary. * </pre> * * <code>string display_name = 2;</code> * * @param value The bytes for displayName to set. * @return This builder for chaining. */ public Builder setDisplayNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); displayName_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private int propertyType_ = 0; /** * * * <pre> * The property's property type. * </pre> * * <code>.google.analytics.admin.v1alpha.PropertyType property_type = 3;</code> * * @return The enum numeric value on the wire for propertyType. */ @java.lang.Override public int getPropertyTypeValue() { return propertyType_; } /** * * * <pre> * The property's property type. * </pre> * * <code>.google.analytics.admin.v1alpha.PropertyType property_type = 3;</code> * * @param value The enum numeric value on the wire for propertyType to set. * @return This builder for chaining. */ public Builder setPropertyTypeValue(int value) { propertyType_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * The property's property type. * </pre> * * <code>.google.analytics.admin.v1alpha.PropertyType property_type = 3;</code> * * @return The propertyType. 
*/ @java.lang.Override public com.google.analytics.admin.v1alpha.PropertyType getPropertyType() { com.google.analytics.admin.v1alpha.PropertyType result = com.google.analytics.admin.v1alpha.PropertyType.forNumber(propertyType_); return result == null ? com.google.analytics.admin.v1alpha.PropertyType.UNRECOGNIZED : result; } /** * * * <pre> * The property's property type. * </pre> * * <code>.google.analytics.admin.v1alpha.PropertyType property_type = 3;</code> * * @param value The propertyType to set. * @return This builder for chaining. */ public Builder setPropertyType(com.google.analytics.admin.v1alpha.PropertyType value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; propertyType_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * The property's property type. * </pre> * * <code>.google.analytics.admin.v1alpha.PropertyType property_type = 3;</code> * * @return This builder for chaining. */ public Builder clearPropertyType() { bitField0_ = (bitField0_ & ~0x00000004); propertyType_ = 0; onChanged(); return this; } private java.lang.Object parent_ = ""; /** * * * <pre> * Resource name of this property's logical parent. * * Note: The Property-Moving UI can be used to change the parent. * Format: accounts/{account}, properties/{property} * Example: "accounts/100", "properties/200" * </pre> * * <code>string parent = 4;</code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Resource name of this property's logical parent. * * Note: The Property-Moving UI can be used to change the parent. 
* Format: accounts/{account}, properties/{property} * Example: "accounts/100", "properties/200" * </pre> * * <code>string parent = 4;</code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Resource name of this property's logical parent. * * Note: The Property-Moving UI can be used to change the parent. * Format: accounts/{account}, properties/{property} * Example: "accounts/100", "properties/200" * </pre> * * <code>string parent = 4;</code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Resource name of this property's logical parent. * * Note: The Property-Moving UI can be used to change the parent. * Format: accounts/{account}, properties/{property} * Example: "accounts/100", "properties/200" * </pre> * * <code>string parent = 4;</code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * Resource name of this property's logical parent. * * Note: The Property-Moving UI can be used to change the parent. * Format: accounts/{account}, properties/{property} * Example: "accounts/100", "properties/200" * </pre> * * <code>string parent = 4;</code> * * @param value The bytes for parent to set. * @return This builder for chaining. 
*/ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.analytics.admin.v1alpha.PropertySummary) } // @@protoc_insertion_point(class_scope:google.analytics.admin.v1alpha.PropertySummary) private static final com.google.analytics.admin.v1alpha.PropertySummary DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.analytics.admin.v1alpha.PropertySummary(); } public static com.google.analytics.admin.v1alpha.PropertySummary getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<PropertySummary> PARSER = new com.google.protobuf.AbstractParser<PropertySummary>() { @java.lang.Override public PropertySummary parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<PropertySummary> 
parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<PropertySummary> getParserForType() { return PARSER; } @java.lang.Override public com.google.analytics.admin.v1alpha.PropertySummary getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/ignite
37,219
modules/web/src/main/java/org/apache/ignite/cache/websession/WebSessionFilter.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.cache.websession; import java.io.IOException; import java.util.Collection; import java.util.Map; import java.util.Objects; import javax.cache.CacheException; import javax.cache.expiry.Duration; import javax.cache.expiry.ExpiryPolicy; import javax.cache.expiry.ModifiedExpiryPolicy; import jakarta.servlet.Filter; import jakarta.servlet.FilterChain; import jakarta.servlet.FilterConfig; import jakarta.servlet.ServletContext; import jakarta.servlet.ServletException; import jakarta.servlet.ServletRequest; import jakarta.servlet.ServletResponse; import jakarta.servlet.http.HttpServletRequest; import jakarta.servlet.http.HttpServletRequestWrapper; import jakarta.servlet.http.HttpSession; import org.apache.ignite.Ignite; import org.apache.ignite.IgniteCache; import org.apache.ignite.IgniteClientDisconnectedException; import org.apache.ignite.IgniteException; import org.apache.ignite.IgniteLogger; import org.apache.ignite.IgniteTransactions; import org.apache.ignite.cluster.ClusterTopologyException; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.internal.IgniteEx; import org.apache.ignite.internal.util.typedef.C1; import 
org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.G; import org.apache.ignite.internal.util.typedef.T2; import org.apache.ignite.internal.util.typedef.X; import org.apache.ignite.internal.util.typedef.internal.S; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.internal.websession.WebSessionAttributeProcessor; import org.apache.ignite.internal.websession.WebSessionEntity; import org.apache.ignite.lang.IgniteClosure; import org.apache.ignite.lang.IgniteFuture; import org.apache.ignite.marshaller.Marshaller; import org.apache.ignite.startup.servlet.ServletContextListenerStartup; import org.apache.ignite.transactions.Transaction; import static java.util.concurrent.TimeUnit.MILLISECONDS; import static org.apache.ignite.cache.CacheAtomicityMode.ATOMIC; import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL; import static org.apache.ignite.cache.CacheMode.PARTITIONED; import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_ASYNC; import static org.apache.ignite.transactions.TransactionConcurrency.PESSIMISTIC; import static org.apache.ignite.transactions.TransactionIsolation.REPEATABLE_READ; /** * Filter for web sessions caching. * <p> * This is a request filter, that you need to specify in your {@code web.xml} along * with {@link ServletContextListenerStartup} to enable web sessions caching: * <pre name="code" class="xml"> * &lt;listener&gt; * &lt;listener-class&gt;org.apache.ignite.startup.servlet.ServletContextListenerStartup&lt;/listener-class&gt; * &lt;/listener&gt; * * &lt;filter&gt; * &lt;filter-name&gt;WebSessionFilter&lt;/filter-name&gt; * &lt;filter-class&gt;org.apache.ignite.cache.websession.WebSessionFilter&lt;/filter-class&gt; * &lt;/filter&gt; * * &lt;!-- You can also specify a custom URL pattern. 
--&gt; * &lt;filter-mapping&gt; * &lt;filter-name&gt;IgniteWebSessionsFilter&lt;/filter-name&gt; * &lt;url-pattern&gt;/*&lt;/url-pattern&gt; * &lt;/filter-mapping&gt; * </pre> * It is also possible to specify a servlet name in a filter mapping, and a servlet URL pattern will * be used in this case: * <pre name="code" class="xml"> * &lt;filter&gt; * &lt;filter-name&gt;WebSessionFilter&lt;/filter-name&gt; * &lt;filter-class&gt;org.apache.ignite.cache.websession.WebSessionFilter&lt;/filter-class&gt; * &lt;/filter&gt; * * &lt;filter-mapping&gt; * &lt;filter-name&gt;WebSessionFilter&lt;/filter-name&gt; * &lt;servlet-name&gt;YourServletName&lt;/servlet-name&gt; * &lt;/filter-mapping&gt; * </pre> * The filter has the following optional configuration parameters: * <table class="doctable"> * <tr> * <th>Name</th> * <th>Description</th> * <th>Default</th> * </tr> * <tr> * <td>IgniteWebSessionsGridName</td> * <td>Name of the grid that contains cache for web session storage.</td> * <td>{@code null} (default grid)</td> * </tr> * <tr> * <td>IgniteWebSessionsCacheName</td> * <td>Name of the cache for web session storage.</td> * <td>{@code null} (default cache)</td> * </tr> * <tr> * <td>IgniteWebSessionsMaximumRetriesOnFail</td> * <td> * Valid for {@code ATOMIC} caches only. Maximum number of retries for session updates in case * node leaves topology and update fails. If retry is enabled, * some updates can be applied more than once, otherwise some * updates can be lost. * <p> * To disable retries, set this parameter to {@code 0}. * </td> * <td>{@code 3}</td> * </tr> * <tr> * <td>IgniteWebSessionsRetriesTimeout</td> * <td> * Retry timeout. Related to IgniteWebSessionsMaximumRetriesOnFail param. * <p> * Further attempts will be cancelled in case timeout was exceeded. * </td> * <td>{@code 10000} (10 seconds)</td> * </tr> * </table> * These parameters are taken from either filter init parameter list or * servlet context parameters. 
You can specify filter init parameters as follows: * <pre name="code" class="xml"> * &lt;filter&gt; * &lt;filter-name&gt;WebSessionFilter&lt;/filter-name&gt; * &lt;filter-class&gt;org.apache.ignite.cache.websession.WebSessionFilter&lt;/filter-class&gt; * &lt;init-param&gt; * &lt;param-name&gt;IgniteWebSessionsGridName&lt;/param-name&gt; * &lt;param-value&gt;WebGrid&lt;/param-value&gt; * &lt;/init-param&gt; * &lt;init-param&gt; * &lt;param-name&gt;IgniteWebSessionsCacheName&lt;/param-name&gt; * &lt;param-value&gt;WebCache&lt;/param-value&gt; * &lt;/init-param&gt; * * &lt;!-- Valid for ATOMIC caches only. --&gt; * &lt;init-param&gt; * &lt;param-name&gt;IgniteWebSessionsMaximumRetriesOnFail&lt;/param-name&gt; * &lt;param-value&gt;10&lt;/param-value&gt; * &lt;/init-param&gt; * &lt;/filter&gt; * </pre> * <b>Note:</b> filter init parameter has a priority over servlet context * parameter; if you specify both, the servlet context parameter will be ignored. * <h1 class="header">Web sessions caching and concurrent requests</h1> * If your web application can accept concurrent request for one session, * consider using {@link org.apache.ignite.cache.CacheAtomicityMode#TRANSACTIONAL} cache * instead of {@link org.apache.ignite.cache.CacheAtomicityMode#ATOMIC}. In this case each request * be processed inside pessimistic transaction which will guarantee that all * updates will be applied in correct order. This is important, for example, * if you get some attribute from the session, update its value and set new * value back to the session. In case of {@link org.apache.ignite.cache.CacheAtomicityMode#ATOMIC} * cache concurrent requests can get equal value, but {@link org.apache.ignite.cache.CacheAtomicityMode#TRANSACTIONAL} * cache will always process such updates one after another. */ public class WebSessionFilter implements Filter { /** Web sessions caching grid name parameter name. 
*/ public static final String WEB_SES_NAME_PARAM = "IgniteWebSessionsGridName"; /** Web sessions caching cache name parameter name. */ public static final String WEB_SES_CACHE_NAME_PARAM = "IgniteWebSessionsCacheName"; /** Web sessions caching retry on fail parameter name (valid for ATOMIC cache only). */ public static final String WEB_SES_MAX_RETRIES_ON_FAIL_NAME_PARAM = "IgniteWebSessionsMaximumRetriesOnFail"; /** Web sessions caching retry on fail timeout parameter name. */ public static final String WEB_SES_RETRIES_TIMEOUT_NAME_PARAM = "IgniteWebSessionsRetriesTimeout"; /** */ public static final String WEB_SES_KEEP_BINARY_PARAM = "IgniteWebSessionsKeepBinary"; /** Default retry on fail flag value. */ public static final int DFLT_MAX_RETRIES_ON_FAIL = 3; /** Default retry on fail timeout flag value. */ public static final int DFLT_RETRIES_ON_FAIL_TIMEOUT = 10000; /** Default keep binary flag. */ public static final boolean DFLT_KEEP_BINARY_FLAG = true; /** Cache. */ private IgniteCache<String, WebSession> cache; /** Binary cache */ private IgniteCache<String, WebSessionEntity> binaryCache; /** Transactions. */ private IgniteTransactions txs; /** Logger. */ private IgniteLogger log; /** Servlet context. */ private ServletContext ctx; /** Session ID transformer. */ private IgniteClosure<String, String> sesIdTransformer; /** Transactions enabled flag. */ private boolean txEnabled; /** Node. */ private Ignite webSesIgnite; /** Cache name. 
*/ private String cacheName; /** */ private int retries; /** */ private int retriesTimeout; /** */ private boolean keepBinary = DFLT_KEEP_BINARY_FLAG; /** */ private Marshaller marshaller; /** {@inheritDoc} */ @Override public void init(FilterConfig cfg) throws ServletException { ctx = cfg.getServletContext(); String igniteInstanceName = U.firstNotNull( cfg.getInitParameter(WEB_SES_NAME_PARAM), ctx.getInitParameter(WEB_SES_NAME_PARAM)); cacheName = U.firstNotNull( cfg.getInitParameter(WEB_SES_CACHE_NAME_PARAM), ctx.getInitParameter(WEB_SES_CACHE_NAME_PARAM)); String retriesStr = U.firstNotNull( cfg.getInitParameter(WEB_SES_MAX_RETRIES_ON_FAIL_NAME_PARAM), ctx.getInitParameter(WEB_SES_MAX_RETRIES_ON_FAIL_NAME_PARAM)); try { retries = retriesStr != null ? Integer.parseInt(retriesStr) : DFLT_MAX_RETRIES_ON_FAIL; } catch (NumberFormatException e) { throw new IgniteException("Maximum number of retries parameter is invalid: " + retriesStr, e); } String retriesTimeoutStr = U.firstNotNull( cfg.getInitParameter(WEB_SES_RETRIES_TIMEOUT_NAME_PARAM), ctx.getInitParameter(WEB_SES_RETRIES_TIMEOUT_NAME_PARAM)); try { retriesTimeout = retriesTimeoutStr != null ? 
Integer.parseInt(retriesTimeoutStr) : DFLT_RETRIES_ON_FAIL_TIMEOUT; } catch (NumberFormatException e) { throw new IgniteException("Retries timeout parameter is invalid: " + retriesTimeoutStr, e); } final String binParam = cfg.getInitParameter(WEB_SES_KEEP_BINARY_PARAM); if (!F.isEmpty(binParam)) keepBinary = Boolean.parseBoolean(binParam); webSesIgnite = G.ignite(igniteInstanceName); if (webSesIgnite == null) throw new IgniteException("Ignite instance for web sessions caching is not started (is it configured?): " + igniteInstanceName); txs = webSesIgnite.transactions(); log = webSesIgnite.log(); marshaller = ((IgniteEx)webSesIgnite).context().marshaller(); initCache(); String srvInfo = ctx.getServerInfo(); // Special case for WebLogic, which appends timestamps to session // IDs upon session creation (the created session ID looks like: // pdpTSTcCcG6CVM8BTZWzxjTB1lh3w7zFbYVvwBb4bJGjrBx3TMPl!-508312620!1385045122601). if (srvInfo != null && srvInfo.contains("WebLogic")) { sesIdTransformer = new C1<String, String>() { @Override public String apply(String s) { // Find first exclamation mark. int idx = s.indexOf('!'); // Return original string if not found. if (idx < 0 || idx == s.length() - 1) return s; // Find second exclamation mark. idx = s.indexOf('!', idx + 1); // Return original string if not found. if (idx < 0) return s; // Return the session ID without timestamp. return s.substring(0, idx); } }; } if (log.isInfoEnabled()) log.info("Started web sessions caching [igniteInstanceName=" + igniteInstanceName + ", cacheName=" + cacheName + ", maxRetriesOnFail=" + retries + ']'); } /** * Init cache. 
     * Resolves the web-session cache and validates its configuration.
     * Called on filter startup and re-invoked by {@code handleCacheOperationException()} when a
     * cache operation fails with {@link IllegalStateException} (e.g. the cache was restarted).
     */
    @SuppressWarnings("unchecked")
    void initCache() {
        // Two typed views of the same underlying cache: 'cache' holds deserialized WebSession
        // values (V1 path), 'binaryCache' holds marshalled WebSessionEntity values (V2 path,
        // used when 'keepBinary' is enabled).
        cache = webSesIgnite.cache(cacheName);
        binaryCache = webSesIgnite.cache(cacheName);

        if (cache == null)
            throw new IgniteException("Cache for web sessions is not started (is it configured?): " + cacheName);

        CacheConfiguration cacheCfg = cache.getConfiguration(CacheConfiguration.class);

        // FULL_ASYNC provides no write-completion guarantees, so session updates could be lost.
        if (cacheCfg.getWriteSynchronizationMode() == FULL_ASYNC)
            throw new IgniteException("Cache for web sessions cannot be in FULL_ASYNC mode: " + cacheName);

        // Session expiry relies on entry TTL actually firing, so eager TTL is required.
        if (!cacheCfg.isEagerTtl())
            throw new IgniteException("Cache for web sessions cannot operate with lazy TTL. " +
                "Consider setting eagerTtl to true for cache: " + cacheName);

        if (cacheCfg.getCacheMode() == PARTITIONED && cacheCfg.getAtomicityMode() != ATOMIC)
            U.quietAndWarn(webSesIgnite.log(), "Using " + cacheCfg.getAtomicityMode() + " atomicity for web sessions " +
                "caching (switch to ATOMIC mode for better performance)");

        // Transactional caches get session updates wrapped in an explicit transaction (see doFilter()).
        txEnabled = cacheCfg.getAtomicityMode() == TRANSACTIONAL;
    }

    /** {@inheritDoc} */
    @Override public void destroy() {
        // No-op.
    }

    /** {@inheritDoc} */
    @Override public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain) throws IOException,
        ServletException {
        assert ctx != null;

        if (req instanceof HttpServletRequest) {
            HttpServletRequest httpReq = (HttpServletRequest)req;

            String sesId = null;

            try {
                if (txEnabled) {
                    // Transactional cache: make the whole load-process-update cycle atomic.
                    try (Transaction tx = txs.txStart(PESSIMISTIC, REPEATABLE_READ)) {
                        sesId = doFilterDispatch(httpReq, res, chain);

                        tx.commit();
                    }
                }
                else
                    sesId = doFilterDispatch(httpReq, res, chain);
            }
            catch (Exception e) {
                // A failure to persist the session must not break request processing.
                U.error(log, "Failed to update web session: " + sesId, e);
            }
        }
        else
            // Non-HTTP requests are passed through untouched.
            chain.doFilter(req, res);
    }

    /**
     * Use {@link WebSession} or {@link WebSessionV2} according to {@link #keepBinary} flag.
     *
     * @param httpReq Request.
     * @param res Response.
     * @param chain Filter chain.
     * @return Session ID.
     * @throws IOException In case of I/O error.
     * @throws ServletException In case of servlet error.
     * @throws CacheException In case of cache error.
     */
    private String doFilterDispatch(HttpServletRequest httpReq, ServletResponse res, FilterChain chain)
        throws IOException, ServletException, CacheException {
        if (keepBinary)
            return doFilterV2(httpReq, res, chain);

        return doFilterV1(httpReq, res, chain);
    }

    /**
     * Processes the request against the V1 (deserialized {@link WebSession}) cache.
     *
     * @param httpReq Request.
     * @param res Response.
     * @param chain Filter chain.
     * @return Session ID.
     * @throws IOException In case of I/O error.
     * @throws ServletException In case of servlet error.
     * @throws CacheException In case of other error.
     */
    private String doFilterV1(HttpServletRequest httpReq, ServletResponse res, FilterChain chain) throws IOException,
        ServletException, CacheException {
        WebSession cached = null;

        String sesId;

        // Prefer the ID of an already-established container session over the requested one.
        if (httpReq.getSession(false) != null)
            sesId = httpReq.getSession(false).getId();
        else
            sesId = httpReq.getRequestedSessionId();

        if (sesId != null) {
            sesId = transformSessionId(sesId);

            // Load the session from cache, retrying on transient failures.
            for (int i = 0; i < retries; i++) {
                try {
                    cached = cache.get(sesId);

                    break;
                }
                catch (CacheException | IgniteException | IllegalStateException e) {
                    handleLoadSessionException(sesId, i, e);
                }
            }

            if (cached != null) {
                if (log.isDebugEnabled())
                    log.debug("Using cached session for ID: " + sesId);

                // Re-wrap so the session is no longer reported as new on subsequent requests.
                if (cached.isNew())
                    cached = new WebSession(cached.getId(), cached, false);
            }
            else {
                if (log.isDebugEnabled())
                    log.debug("Cached session was invalidated and doesn't exist: " + sesId);

                // Cache entry may have expired; invalidate the stale container session too.
                HttpSession ses = httpReq.getSession(false);

                if (ses != null) {
                    try {
                        ses.invalidate();
                    }
                    catch (IllegalStateException ignore) {
                        // Session was already invalidated.
                    }
                }

                cached = createSession(httpReq);
            }
        }
        else
            cached = createSession(httpReq);

        assert cached != null;

        sesId = cached.getId();

        cached.servletContext(ctx);
        cached.filter(this);
        cached.resetUpdates();
        cached.genSes(httpReq.getSession(false));

        // Expose the cached session to downstream filters/servlets via a request wrapper.
        httpReq = new RequestWrapper(httpReq, cached);

        chain.doFilter(httpReq, res);

        // Flush attribute updates collected during request processing back to the cache.
        HttpSession ses = httpReq.getSession(false);

        if (ses != null && ses instanceof WebSession) {
            Collection<T2<String, Object>> updates = ((WebSession)ses).updates();

            if (updates != null)
                updateAttributes(transformSessionId(sesId), updates, ses.getMaxInactiveInterval());
        }

        return sesId;
    }

    /**
     * Processes the request against the V2 (binary {@link WebSessionV2}) cache.
     *
     * @param httpReq Request.
     * @param res Response.
     * @param chain Filter chain.
     * @return Session ID.
     * @throws IOException In case of I/O error.
     * @throws ServletException In case of servlet error.
     * @throws CacheException In case of other error.
     */
    private String doFilterV2(HttpServletRequest httpReq, ServletResponse res, FilterChain chain)
        throws IOException, ServletException, CacheException {
        WebSessionV2 cached = null;

        String sesId;

        if (httpReq.getSession(false) != null)
            sesId = httpReq.getSession(false).getId();
        else
            sesId = httpReq.getRequestedSessionId();

        if (sesId != null) {
            sesId = transformSessionId(sesId);

            // Load from cache.
            for (int i = 0; i < retries; i++) {
                try {
                    final WebSessionEntity entity = binaryCache.get(sesId);

                    if (entity != null)
                        cached = new WebSessionV2(sesId, httpReq.getSession(false), false, ctx, entity, marshaller);

                    break;
                }
                catch (CacheException | IgniteException | IllegalStateException e) {
                    handleLoadSessionException(sesId, i, e);
                }
            }

            if (cached != null) {
                if (log.isDebugEnabled())
                    log.debug("Using cached session for ID: " + sesId);
            }
            // If not found - invalidate session and create new one.
            // Invalidate, because session might be removed from cache
            // according to expiry policy.
            else {
                if (log.isDebugEnabled())
                    log.debug("Cached session was invalidated and doesn't exist: " + sesId);

                final HttpSession ses = httpReq.getSession(false);

                if (ses != null) {
                    try {
                        ses.invalidate();
                    }
                    catch (IllegalStateException ignore) {
                        // Session was already invalidated.
                    }
                }

                cached = createSessionV2(httpReq);
            }
        }
        // No session was requested by the client, create new one and put in the request.
        else
            cached = createSessionV2(httpReq);

        assert cached != null;

        sesId = cached.getId();

        httpReq = new RequestWrapperV2(httpReq, cached);

        chain.doFilter(httpReq, res);

        // Push binary attribute updates back to the cache if the session is still valid.
        WebSessionV2 cachedNew = (WebSessionV2)httpReq.getSession(false);

        if (cachedNew != null && cachedNew.isValid())
            updateAttributesV2(cachedNew.getId(), cachedNew);

        return sesId;
    }

    /**
     * Log and process exception happened on loading session from cache.
     *
     * @param sesId Session ID.
     * @param tryCnt Try count.
     * @param e Caught exception.
     */
    private void handleLoadSessionException(final String sesId, final int tryCnt, final RuntimeException e) {
        if (log.isDebugEnabled())
            log.debug(e.getMessage());

        // Out of retries - give up and propagate.
        if (tryCnt == retries - 1)
            throw new IgniteException("Failed to handle request [session= " + sesId + "]", e);
        else {
            if (log.isDebugEnabled())
                log.debug("Failed to handle request (will retry): " + sesId);

            handleCacheOperationException(e);
        }
    }

    /**
     * Transform session ID if ID transformer present.
     *
     * @param sesId Session ID to transform.
     * @return Transformed session ID or the same if no session transformer available.
     */
    private String transformSessionId(final String sesId) {
        if (sesIdTransformer != null)
            return sesIdTransformer.apply(sesId);

        return sesId;
    }

    /**
     * Creates a new session from http request.
     *
     * @param httpReq Request.
     * @return New session.
     */
    private WebSession createSession(HttpServletRequest httpReq) {
        HttpSession ses = httpReq.getSession(true);

        String sesId = transformSessionId(ses.getId());

        return createSession(ses, sesId);
    }

    /**
     * Creates a new web session with the specified id.
     *
     * @param ses Base session.
     * @param sesId Session id.
     * @return New session.
     */
    private WebSession createSession(HttpSession ses, String sesId) {
        WebSession cached = new WebSession(sesId, ses, true);

        cached.genSes(ses);

        if (log.isDebugEnabled())
            log.debug("Session created: " + sesId);

        // Publish with getAndPutIfAbsent() so that a concurrent request creating the same
        // session wins deterministically; the retry loop handles transient cache failures.
        for (int i = 0; i < retries; i++) {
            try {
                final IgniteCache<String, WebSession> cache0 =
                    cacheWithExpiryPolicy(cached.getMaxInactiveInterval(), cache);

                final WebSession old = cache0.getAndPutIfAbsent(sesId, cached);

                if (old != null) {
                    // Lost the race - adopt the previously stored session.
                    cached = old;

                    if (cached.isNew())
                        cached = new WebSession(cached.getId(), cached, false);
                }

                break;
            }
            catch (CacheException | IgniteException | IllegalStateException e) {
                handleCreateSessionException(sesId, i, e);
            }
        }

        return cached;
    }

    /**
     * Log error and delegate exception processing to {@link #handleCacheOperationException(Exception)}
     *
     * @param sesId Session ID.
     * @param tryCnt Try count.
     * @param e Exception to process.
     */
    private void handleCreateSessionException(final String sesId, final int tryCnt, final RuntimeException e) {
        if (log.isDebugEnabled())
            log.debug(e.getMessage());

        // Out of retries - give up and propagate.
        if (tryCnt == retries - 1)
            throw new IgniteException("Failed to save session: " + sesId, e);
        else {
            if (log.isDebugEnabled())
                log.debug("Failed to save session (will retry): " + sesId);

            handleCacheOperationException(e);
        }
    }

    /**
     * Creates a new web session with the specified id.
     *
     * @param ses Base session.
     * @param sesId Session id.
     * @return New session.
     * @throws IOException In case of marshalling error.
     */
    private WebSessionV2 createSessionV2(final HttpSession ses, final String sesId) throws IOException {
        assert ses != null;
        assert sesId != null;

        WebSessionV2 cached = new WebSessionV2(sesId, ses, true, ctx, null, marshaller);

        // V2 cache stores attributes in marshalled (binary) form.
        final WebSessionEntity marshaledEntity = cached.marshalAttributes();

        for (int i = 0; i < retries; i++) {
            try {
                final IgniteCache<String, WebSessionEntity> cache0 = cacheWithExpiryPolicy(
                    cached.getMaxInactiveInterval(), binaryCache);

                final WebSessionEntity old = cache0.getAndPutIfAbsent(sesId, marshaledEntity);

                // Wrap whichever entity actually ended up in the cache (ours or a concurrent one).
                if (old != null)
                    cached = new WebSessionV2(sesId, ses, false, ctx, old, marshaller);
                else
                    cached = new WebSessionV2(sesId, ses, false, ctx, marshaledEntity, marshaller);

                break;
            }
            catch (CacheException | IgniteException | IllegalStateException e) {
                handleCreateSessionException(sesId, i, e);
            }
        }

        return cached;
    }

    /**
     * Creates a new V2 session from the given HTTP request.
     *
     * @param httpReq HTTP request.
     * @return Cached session.
     * @throws IOException In case of marshalling error.
     */
    private WebSessionV2 createSessionV2(HttpServletRequest httpReq) throws IOException {
        final HttpSession ses = httpReq.getSession(true);

        final String sesId = transformSessionId(ses.getId());

        if (log.isDebugEnabled())
            log.debug("Session created: " + sesId);

        return createSessionV2(ses, sesId);
    }

    /**
     * @param maxInactiveInteval Interval to use in expiry policy.
     * @param cache Cache.
     * @param <T> Cached object type.
     * @return Cache with expiry policy if {@code maxInactiveInteval} greater than zero.
     */
    private <T> IgniteCache<String, T> cacheWithExpiryPolicy(final int maxInactiveInteval,
        final IgniteCache<String, T> cache) {
        if (maxInactiveInteval > 0) {
            // Max inactive interval is in seconds; the expiry policy duration is in milliseconds.
            long ttl = maxInactiveInteval * 1000L;

            ExpiryPolicy plc = new ModifiedExpiryPolicy(new Duration(MILLISECONDS, ttl));

            return cache.withExpiryPolicy(plc);
        }

        return cache;
    }

    /**
     * @param sesId Session ID.
*/ public void destroySession(String sesId) { assert sesId != null; for (int i = 0; i < retries; i++) { try { if (cache.remove(sesId) && log.isDebugEnabled()) log.debug("Session destroyed: " + sesId); } catch (CacheException | IgniteException | IllegalStateException e) { if (i == retries - 1) { U.warn(log, "Failed to remove session [sesId=" + sesId + ", retries=" + retries + ']'); } else { U.warn(log, "Failed to remove session (will retry): " + sesId); handleCacheOperationException(e); } } } } /** * @param sesId Session ID. * @param updates Updates list. * @param maxInactiveInterval Max session inactive interval. */ public void updateAttributes(String sesId, Collection<T2<String, Object>> updates, int maxInactiveInterval) { assert sesId != null; assert updates != null; if (log.isDebugEnabled()) log.debug("Session attributes updated [id=" + sesId + ", updates=" + updates + ']'); try { for (int i = 0; i < retries; i++) { try { final IgniteCache<String, WebSession> cache0 = cacheWithExpiryPolicy(maxInactiveInterval, cache); cache0.invoke(sesId, WebSessionListener.newAttributeProcessor(updates)); break; } catch (CacheException | IgniteException | IllegalStateException e) { handleAttributeUpdateException(sesId, i, e); } } } catch (Exception e) { U.error(log, "Failed to update session attributes [id=" + sesId + ']', e); } } /** * @param sesId Session ID. * @param ses Web session. */ public void updateAttributesV2(final String sesId, final WebSessionV2 ses) throws IOException { assert sesId != null; assert ses != null; final Map<String, byte[]> updatesMap = ses.binaryUpdatesMap(); if (log.isDebugEnabled()) log.debug("Session binary attributes updated [id=" + sesId + ", updates=" + updatesMap.keySet() + ']'); try { for (int i = 0; i < retries; i++) { try { final IgniteCache<String, WebSessionEntity> cache0 = cacheWithExpiryPolicy(ses.getMaxInactiveInterval(), binaryCache); cache0.invoke(sesId, new WebSessionAttributeProcessor(updatesMap.isEmpty() ? 
null : updatesMap, ses.getLastAccessedTime(), ses.getMaxInactiveInterval(), ses.isMaxInactiveIntervalChanged())); break; } catch (CacheException | IgniteException | IllegalStateException e) { handleAttributeUpdateException(sesId, i, e); } } } catch (Exception e) { U.error(log, "Failed to update session V2 attributes [id=" + sesId + ']', e); } } /** * Log error and delegate processing to {@link #handleCacheOperationException(Exception)}. * * @param sesId Session ID. * @param tryCnt Try count. * @param e Exception to process. */ private void handleAttributeUpdateException(final String sesId, final int tryCnt, final RuntimeException e) { if (tryCnt == retries - 1) { U.error(log, "Failed to apply updates for session (maximum number of retries exceeded) [sesId=" + sesId + ", retries=" + retries + ']', e); } else { U.warn(log, "Failed to apply updates for session (will retry): " + sesId); handleCacheOperationException(e); } } /** * Handles cache operation exception. * @param e Exception */ void handleCacheOperationException(Exception e) { IgniteFuture<?> retryFut = null; if (e instanceof IllegalStateException) { initCache(); return; } else if (X.hasCause(e, IgniteClientDisconnectedException.class)) { IgniteClientDisconnectedException cause = X.cause(e, IgniteClientDisconnectedException.class); assert cause != null : e; retryFut = cause.reconnectFuture(); } else if (X.hasCause(e, ClusterTopologyException.class)) { ClusterTopologyException cause = X.cause(e, ClusterTopologyException.class); assert cause != null : e; retryFut = cause.retryReadyFuture(); } if (retryFut != null) { try { retryFut.get(retriesTimeout); } catch (IgniteException retryErr) { throw new IgniteException("Failed to wait for retry: " + retryErr); } } } /** {@inheritDoc} */ @Override public String toString() { return S.toString(WebSessionFilter.class, this); } /** * Request wrapper. */ private class RequestWrapper extends HttpServletRequestWrapper { /** Session. 
*/ private volatile WebSession ses; /** * @param req Request. * @param ses Session. */ private RequestWrapper(HttpServletRequest req, WebSession ses) { super(req); assert ses != null; this.ses = ses; } /** {@inheritDoc} */ @Override public HttpSession getSession(boolean create) { if (!ses.isValid()) { if (create) { this.ses = createSession((HttpServletRequest)getRequest()); this.ses.servletContext(ctx); this.ses.filter(WebSessionFilter.this); this.ses.resetUpdates(); } else return null; } return ses; } /** {@inheritDoc} */ @Override public HttpSession getSession() { return getSession(true); } /** {@inheritDoc} */ @Override public String changeSessionId() { HttpServletRequest req = (HttpServletRequest)getRequest(); String newId = req.changeSessionId(); this.ses.setId(newId); this.ses = createSession(ses, newId); this.ses.servletContext(ctx); this.ses.filter(WebSessionFilter.this); this.ses.resetUpdates(); return newId; } /** {@inheritDoc} */ @Override public void login(String username, String password) throws ServletException { HttpServletRequest req = (HttpServletRequest)getRequest(); req.login(username, password); String newId = req.getSession(false).getId(); this.ses.setId(newId); this.ses = createSession(ses, newId); this.ses.servletContext(ctx); this.ses.filter(WebSessionFilter.this); this.ses.resetUpdates(); } /** {@inheritDoc} */ @Override public boolean isRequestedSessionIdValid() { return ses.isValid(); } } /** * Request wrapper V2. */ private class RequestWrapperV2 extends HttpServletRequestWrapper { /** Session. */ private WebSessionV2 ses; /** * @param req Request. * @param ses Session. 
*/ private RequestWrapperV2(HttpServletRequest req, WebSessionV2 ses) { super(req); assert ses != null; this.ses = ses; } /** {@inheritDoc} */ @Override public HttpSession getSession(boolean create) { if (ses != null && !ses.isValid()) { binaryCache.remove(ses.id()); if (create) { try { ses = createSessionV2((HttpServletRequest)getRequest()); } catch (IOException e) { throw new IgniteException(e); } } else ses = null; } return ses; } /** {@inheritDoc} */ @Override public HttpSession getSession() { return getSession(true); } /** {@inheritDoc} */ @Override public String changeSessionId() { final HttpServletRequest req = (HttpServletRequest)getRequest(); final String newId = req.changeSessionId(); if (!Objects.equals(newId, ses.getId())) { try { ses = createSessionV2(ses, newId); } catch (IOException e) { throw new IgniteException(e); } } return newId; } /** {@inheritDoc} */ @Override public void login(String username, String password) throws ServletException { final HttpServletRequest req = (HttpServletRequest)getRequest(); req.login(username, password); final String newId = req.getSession(false).getId(); if (!Objects.equals(newId, ses.getId())) { try { ses = createSessionV2(ses, newId); } catch (IOException e) { throw new IgniteException(e); } } } /** {@inheritDoc} */ @Override public boolean isRequestedSessionIdValid() { return ses != null && ses.isValid(); } } }
googleapis/google-cloud-java
37,544
java-maps-routing/grpc-google-maps-routing-v2/src/main/java/com/google/maps/routing/v2/RoutesGrpc.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.maps.routing.v2; import static io.grpc.MethodDescriptor.generateFullMethodName; /** * * * <pre> * The Routes API. * </pre> */ @javax.annotation.Generated( value = "by gRPC proto compiler", comments = "Source: google/maps/routing/v2/routes_service.proto") @io.grpc.stub.annotations.GrpcGenerated public final class RoutesGrpc { private RoutesGrpc() {} public static final java.lang.String SERVICE_NAME = "google.maps.routing.v2.Routes"; // Static method descriptors that strictly reflect the proto. 
private static volatile io.grpc.MethodDescriptor< com.google.maps.routing.v2.ComputeRoutesRequest, com.google.maps.routing.v2.ComputeRoutesResponse> getComputeRoutesMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "ComputeRoutes", requestType = com.google.maps.routing.v2.ComputeRoutesRequest.class, responseType = com.google.maps.routing.v2.ComputeRoutesResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.maps.routing.v2.ComputeRoutesRequest, com.google.maps.routing.v2.ComputeRoutesResponse> getComputeRoutesMethod() { io.grpc.MethodDescriptor< com.google.maps.routing.v2.ComputeRoutesRequest, com.google.maps.routing.v2.ComputeRoutesResponse> getComputeRoutesMethod; if ((getComputeRoutesMethod = RoutesGrpc.getComputeRoutesMethod) == null) { synchronized (RoutesGrpc.class) { if ((getComputeRoutesMethod = RoutesGrpc.getComputeRoutesMethod) == null) { RoutesGrpc.getComputeRoutesMethod = getComputeRoutesMethod = io.grpc.MethodDescriptor .<com.google.maps.routing.v2.ComputeRoutesRequest, com.google.maps.routing.v2.ComputeRoutesResponse> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ComputeRoutes")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.maps.routing.v2.ComputeRoutesRequest.getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.maps.routing.v2.ComputeRoutesResponse .getDefaultInstance())) .setSchemaDescriptor(new RoutesMethodDescriptorSupplier("ComputeRoutes")) .build(); } } } return getComputeRoutesMethod; } private static volatile io.grpc.MethodDescriptor< com.google.maps.routing.v2.ComputeRouteMatrixRequest, com.google.maps.routing.v2.RouteMatrixElement> getComputeRouteMatrixMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "ComputeRouteMatrix", 
requestType = com.google.maps.routing.v2.ComputeRouteMatrixRequest.class, responseType = com.google.maps.routing.v2.RouteMatrixElement.class, methodType = io.grpc.MethodDescriptor.MethodType.SERVER_STREAMING) public static io.grpc.MethodDescriptor< com.google.maps.routing.v2.ComputeRouteMatrixRequest, com.google.maps.routing.v2.RouteMatrixElement> getComputeRouteMatrixMethod() { io.grpc.MethodDescriptor< com.google.maps.routing.v2.ComputeRouteMatrixRequest, com.google.maps.routing.v2.RouteMatrixElement> getComputeRouteMatrixMethod; if ((getComputeRouteMatrixMethod = RoutesGrpc.getComputeRouteMatrixMethod) == null) { synchronized (RoutesGrpc.class) { if ((getComputeRouteMatrixMethod = RoutesGrpc.getComputeRouteMatrixMethod) == null) { RoutesGrpc.getComputeRouteMatrixMethod = getComputeRouteMatrixMethod = io.grpc.MethodDescriptor .<com.google.maps.routing.v2.ComputeRouteMatrixRequest, com.google.maps.routing.v2.RouteMatrixElement> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.SERVER_STREAMING) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ComputeRouteMatrix")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.maps.routing.v2.ComputeRouteMatrixRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.maps.routing.v2.RouteMatrixElement.getDefaultInstance())) .setSchemaDescriptor(new RoutesMethodDescriptorSupplier("ComputeRouteMatrix")) .build(); } } } return getComputeRouteMatrixMethod; } /** Creates a new async stub that supports all call types for the service */ public static RoutesStub newStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<RoutesStub> factory = new io.grpc.stub.AbstractStub.StubFactory<RoutesStub>() { @java.lang.Override public RoutesStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new RoutesStub(channel, callOptions); } }; return RoutesStub.newStub(factory, channel); } /** 
Creates a new blocking-style stub that supports all types of calls on the service */ public static RoutesBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<RoutesBlockingV2Stub> factory = new io.grpc.stub.AbstractStub.StubFactory<RoutesBlockingV2Stub>() { @java.lang.Override public RoutesBlockingV2Stub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new RoutesBlockingV2Stub(channel, callOptions); } }; return RoutesBlockingV2Stub.newStub(factory, channel); } /** * Creates a new blocking-style stub that supports unary and streaming output calls on the service */ public static RoutesBlockingStub newBlockingStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<RoutesBlockingStub> factory = new io.grpc.stub.AbstractStub.StubFactory<RoutesBlockingStub>() { @java.lang.Override public RoutesBlockingStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new RoutesBlockingStub(channel, callOptions); } }; return RoutesBlockingStub.newStub(factory, channel); } /** Creates a new ListenableFuture-style stub that supports unary calls on the service */ public static RoutesFutureStub newFutureStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<RoutesFutureStub> factory = new io.grpc.stub.AbstractStub.StubFactory<RoutesFutureStub>() { @java.lang.Override public RoutesFutureStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new RoutesFutureStub(channel, callOptions); } }; return RoutesFutureStub.newStub(factory, channel); } /** * * * <pre> * The Routes API. * </pre> */ public interface AsyncService { /** * * * <pre> * Returns the primary route along with optional alternate routes, given a set * of terminal and intermediate waypoints. * **NOTE:** This method requires that you specify a response field mask in * the input. 
You can provide the response field mask by using URL parameter * `$fields` or `fields`, or by using an HTTP/gRPC header `X-Goog-FieldMask` * (see the [available URL parameters and * headers](https://cloud.google.com/apis/docs/system-parameters)). The value * is a comma separated list of field paths. See detailed documentation about * [how to construct the field * paths](https://github.com/protocolbuffers/protobuf/blob/master/src/google/protobuf/field_mask.proto). * For example, in this method: * * Field mask of all available fields (for manual inspection): * `X-Goog-FieldMask: *` * * Field mask of Route-level duration, distance, and polyline (an example * production setup): * `X-Goog-FieldMask: * routes.duration,routes.distanceMeters,routes.polyline.encodedPolyline` * Google discourage the use of the wildcard (`*`) response field mask, or * specifying the field mask at the top level (`routes`), because: * * Selecting only the fields that you need helps our server save computation * cycles, allowing us to return the result to you with a lower latency. * * Selecting only the fields that you need * in your production job ensures stable latency performance. We might add * more response fields in the future, and those new fields might require * extra computation time. If you select all fields, or if you select all * fields at the top level, then you might experience performance degradation * because any new field we add will be automatically included in the * response. * * Selecting only the fields that you need results in a smaller response * size, and thus higher network throughput. 
* </pre> */ default void computeRoutes( com.google.maps.routing.v2.ComputeRoutesRequest request, io.grpc.stub.StreamObserver<com.google.maps.routing.v2.ComputeRoutesResponse> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getComputeRoutesMethod(), responseObserver); } /** * * * <pre> * Takes in a list of origins and destinations and returns a stream containing * route information for each combination of origin and destination. * **NOTE:** This method requires that you specify a response field mask in * the input. You can provide the response field mask by using the URL * parameter `$fields` or `fields`, or by using the HTTP/gRPC header * `X-Goog-FieldMask` (see the [available URL parameters and * headers](https://cloud.google.com/apis/docs/system-parameters)). * The value is a comma separated list of field paths. See this detailed * documentation about [how to construct the field * paths](https://github.com/protocolbuffers/protobuf/blob/master/src/google/protobuf/field_mask.proto). * For example, in this method: * * Field mask of all available fields (for manual inspection): * `X-Goog-FieldMask: *` * * Field mask of route durations, distances, element status, condition, and * element indices (an example production setup): * `X-Goog-FieldMask: * originIndex,destinationIndex,status,condition,distanceMeters,duration` * It is critical that you include `status` in your field mask as otherwise * all messages will appear to be OK. Google discourages the use of the * wildcard (`*`) response field mask, because: * * Selecting only the fields that you need helps our server save computation * cycles, allowing us to return the result to you with a lower latency. * * Selecting only the fields that you need in your production job ensures * stable latency performance. We might add more response fields in the * future, and those new fields might require extra computation time. 
If you * select all fields, or if you select all fields at the top level, then you * might experience performance degradation because any new field we add will * be automatically included in the response. * * Selecting only the fields that you need results in a smaller response * size, and thus higher network throughput. * </pre> */ default void computeRouteMatrix( com.google.maps.routing.v2.ComputeRouteMatrixRequest request, io.grpc.stub.StreamObserver<com.google.maps.routing.v2.RouteMatrixElement> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getComputeRouteMatrixMethod(), responseObserver); } } /** * Base class for the server implementation of the service Routes. * * <pre> * The Routes API. * </pre> */ public abstract static class RoutesImplBase implements io.grpc.BindableService, AsyncService { @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() { return RoutesGrpc.bindService(this); } } /** * A stub to allow clients to do asynchronous rpc calls to service Routes. * * <pre> * The Routes API. * </pre> */ public static final class RoutesStub extends io.grpc.stub.AbstractAsyncStub<RoutesStub> { private RoutesStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected RoutesStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new RoutesStub(channel, callOptions); } /** * * * <pre> * Returns the primary route along with optional alternate routes, given a set * of terminal and intermediate waypoints. * **NOTE:** This method requires that you specify a response field mask in * the input. You can provide the response field mask by using URL parameter * `$fields` or `fields`, or by using an HTTP/gRPC header `X-Goog-FieldMask` * (see the [available URL parameters and * headers](https://cloud.google.com/apis/docs/system-parameters)). The value * is a comma separated list of field paths. 
See detailed documentation about * [how to construct the field * paths](https://github.com/protocolbuffers/protobuf/blob/master/src/google/protobuf/field_mask.proto). * For example, in this method: * * Field mask of all available fields (for manual inspection): * `X-Goog-FieldMask: *` * * Field mask of Route-level duration, distance, and polyline (an example * production setup): * `X-Goog-FieldMask: * routes.duration,routes.distanceMeters,routes.polyline.encodedPolyline` * Google discourage the use of the wildcard (`*`) response field mask, or * specifying the field mask at the top level (`routes`), because: * * Selecting only the fields that you need helps our server save computation * cycles, allowing us to return the result to you with a lower latency. * * Selecting only the fields that you need * in your production job ensures stable latency performance. We might add * more response fields in the future, and those new fields might require * extra computation time. If you select all fields, or if you select all * fields at the top level, then you might experience performance degradation * because any new field we add will be automatically included in the * response. * * Selecting only the fields that you need results in a smaller response * size, and thus higher network throughput. * </pre> */ public void computeRoutes( com.google.maps.routing.v2.ComputeRoutesRequest request, io.grpc.stub.StreamObserver<com.google.maps.routing.v2.ComputeRoutesResponse> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getComputeRoutesMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Takes in a list of origins and destinations and returns a stream containing * route information for each combination of origin and destination. * **NOTE:** This method requires that you specify a response field mask in * the input. 
You can provide the response field mask by using the URL * parameter `$fields` or `fields`, or by using the HTTP/gRPC header * `X-Goog-FieldMask` (see the [available URL parameters and * headers](https://cloud.google.com/apis/docs/system-parameters)). * The value is a comma separated list of field paths. See this detailed * documentation about [how to construct the field * paths](https://github.com/protocolbuffers/protobuf/blob/master/src/google/protobuf/field_mask.proto). * For example, in this method: * * Field mask of all available fields (for manual inspection): * `X-Goog-FieldMask: *` * * Field mask of route durations, distances, element status, condition, and * element indices (an example production setup): * `X-Goog-FieldMask: * originIndex,destinationIndex,status,condition,distanceMeters,duration` * It is critical that you include `status` in your field mask as otherwise * all messages will appear to be OK. Google discourages the use of the * wildcard (`*`) response field mask, because: * * Selecting only the fields that you need helps our server save computation * cycles, allowing us to return the result to you with a lower latency. * * Selecting only the fields that you need in your production job ensures * stable latency performance. We might add more response fields in the * future, and those new fields might require extra computation time. If you * select all fields, or if you select all fields at the top level, then you * might experience performance degradation because any new field we add will * be automatically included in the response. * * Selecting only the fields that you need results in a smaller response * size, and thus higher network throughput. 
* </pre> */ public void computeRouteMatrix( com.google.maps.routing.v2.ComputeRouteMatrixRequest request, io.grpc.stub.StreamObserver<com.google.maps.routing.v2.RouteMatrixElement> responseObserver) { io.grpc.stub.ClientCalls.asyncServerStreamingCall( getChannel().newCall(getComputeRouteMatrixMethod(), getCallOptions()), request, responseObserver); } } /** * A stub to allow clients to do synchronous rpc calls to service Routes. * * <pre> * The Routes API. * </pre> */ public static final class RoutesBlockingV2Stub extends io.grpc.stub.AbstractBlockingStub<RoutesBlockingV2Stub> { private RoutesBlockingV2Stub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected RoutesBlockingV2Stub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new RoutesBlockingV2Stub(channel, callOptions); } /** * * * <pre> * Returns the primary route along with optional alternate routes, given a set * of terminal and intermediate waypoints. * **NOTE:** This method requires that you specify a response field mask in * the input. You can provide the response field mask by using URL parameter * `$fields` or `fields`, or by using an HTTP/gRPC header `X-Goog-FieldMask` * (see the [available URL parameters and * headers](https://cloud.google.com/apis/docs/system-parameters)). The value * is a comma separated list of field paths. See detailed documentation about * [how to construct the field * paths](https://github.com/protocolbuffers/protobuf/blob/master/src/google/protobuf/field_mask.proto). 
* For example, in this method: * * Field mask of all available fields (for manual inspection): * `X-Goog-FieldMask: *` * * Field mask of Route-level duration, distance, and polyline (an example * production setup): * `X-Goog-FieldMask: * routes.duration,routes.distanceMeters,routes.polyline.encodedPolyline` * Google discourage the use of the wildcard (`*`) response field mask, or * specifying the field mask at the top level (`routes`), because: * * Selecting only the fields that you need helps our server save computation * cycles, allowing us to return the result to you with a lower latency. * * Selecting only the fields that you need * in your production job ensures stable latency performance. We might add * more response fields in the future, and those new fields might require * extra computation time. If you select all fields, or if you select all * fields at the top level, then you might experience performance degradation * because any new field we add will be automatically included in the * response. * * Selecting only the fields that you need results in a smaller response * size, and thus higher network throughput. * </pre> */ public com.google.maps.routing.v2.ComputeRoutesResponse computeRoutes( com.google.maps.routing.v2.ComputeRoutesRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getComputeRoutesMethod(), getCallOptions(), request); } /** * * * <pre> * Takes in a list of origins and destinations and returns a stream containing * route information for each combination of origin and destination. * **NOTE:** This method requires that you specify a response field mask in * the input. You can provide the response field mask by using the URL * parameter `$fields` or `fields`, or by using the HTTP/gRPC header * `X-Goog-FieldMask` (see the [available URL parameters and * headers](https://cloud.google.com/apis/docs/system-parameters)). * The value is a comma separated list of field paths. 
See this detailed * documentation about [how to construct the field * paths](https://github.com/protocolbuffers/protobuf/blob/master/src/google/protobuf/field_mask.proto). * For example, in this method: * * Field mask of all available fields (for manual inspection): * `X-Goog-FieldMask: *` * * Field mask of route durations, distances, element status, condition, and * element indices (an example production setup): * `X-Goog-FieldMask: * originIndex,destinationIndex,status,condition,distanceMeters,duration` * It is critical that you include `status` in your field mask as otherwise * all messages will appear to be OK. Google discourages the use of the * wildcard (`*`) response field mask, because: * * Selecting only the fields that you need helps our server save computation * cycles, allowing us to return the result to you with a lower latency. * * Selecting only the fields that you need in your production job ensures * stable latency performance. We might add more response fields in the * future, and those new fields might require extra computation time. If you * select all fields, or if you select all fields at the top level, then you * might experience performance degradation because any new field we add will * be automatically included in the response. * * Selecting only the fields that you need results in a smaller response * size, and thus higher network throughput. * </pre> */ @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/10918") public io.grpc.stub.BlockingClientCall<?, com.google.maps.routing.v2.RouteMatrixElement> computeRouteMatrix(com.google.maps.routing.v2.ComputeRouteMatrixRequest request) { return io.grpc.stub.ClientCalls.blockingV2ServerStreamingCall( getChannel(), getComputeRouteMatrixMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do limited synchronous rpc calls to service Routes. * * <pre> * The Routes API. 
* </pre> */ public static final class RoutesBlockingStub extends io.grpc.stub.AbstractBlockingStub<RoutesBlockingStub> { private RoutesBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected RoutesBlockingStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new RoutesBlockingStub(channel, callOptions); } /** * * * <pre> * Returns the primary route along with optional alternate routes, given a set * of terminal and intermediate waypoints. * **NOTE:** This method requires that you specify a response field mask in * the input. You can provide the response field mask by using URL parameter * `$fields` or `fields`, or by using an HTTP/gRPC header `X-Goog-FieldMask` * (see the [available URL parameters and * headers](https://cloud.google.com/apis/docs/system-parameters)). The value * is a comma separated list of field paths. See detailed documentation about * [how to construct the field * paths](https://github.com/protocolbuffers/protobuf/blob/master/src/google/protobuf/field_mask.proto). * For example, in this method: * * Field mask of all available fields (for manual inspection): * `X-Goog-FieldMask: *` * * Field mask of Route-level duration, distance, and polyline (an example * production setup): * `X-Goog-FieldMask: * routes.duration,routes.distanceMeters,routes.polyline.encodedPolyline` * Google discourage the use of the wildcard (`*`) response field mask, or * specifying the field mask at the top level (`routes`), because: * * Selecting only the fields that you need helps our server save computation * cycles, allowing us to return the result to you with a lower latency. * * Selecting only the fields that you need * in your production job ensures stable latency performance. We might add * more response fields in the future, and those new fields might require * extra computation time. 
If you select all fields, or if you select all * fields at the top level, then you might experience performance degradation * because any new field we add will be automatically included in the * response. * * Selecting only the fields that you need results in a smaller response * size, and thus higher network throughput. * </pre> */ public com.google.maps.routing.v2.ComputeRoutesResponse computeRoutes( com.google.maps.routing.v2.ComputeRoutesRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getComputeRoutesMethod(), getCallOptions(), request); } /** * * * <pre> * Takes in a list of origins and destinations and returns a stream containing * route information for each combination of origin and destination. * **NOTE:** This method requires that you specify a response field mask in * the input. You can provide the response field mask by using the URL * parameter `$fields` or `fields`, or by using the HTTP/gRPC header * `X-Goog-FieldMask` (see the [available URL parameters and * headers](https://cloud.google.com/apis/docs/system-parameters)). * The value is a comma separated list of field paths. See this detailed * documentation about [how to construct the field * paths](https://github.com/protocolbuffers/protobuf/blob/master/src/google/protobuf/field_mask.proto). * For example, in this method: * * Field mask of all available fields (for manual inspection): * `X-Goog-FieldMask: *` * * Field mask of route durations, distances, element status, condition, and * element indices (an example production setup): * `X-Goog-FieldMask: * originIndex,destinationIndex,status,condition,distanceMeters,duration` * It is critical that you include `status` in your field mask as otherwise * all messages will appear to be OK. Google discourages the use of the * wildcard (`*`) response field mask, because: * * Selecting only the fields that you need helps our server save computation * cycles, allowing us to return the result to you with a lower latency. 
* * Selecting only the fields that you need in your production job ensures * stable latency performance. We might add more response fields in the * future, and those new fields might require extra computation time. If you * select all fields, or if you select all fields at the top level, then you * might experience performance degradation because any new field we add will * be automatically included in the response. * * Selecting only the fields that you need results in a smaller response * size, and thus higher network throughput. * </pre> */ public java.util.Iterator<com.google.maps.routing.v2.RouteMatrixElement> computeRouteMatrix( com.google.maps.routing.v2.ComputeRouteMatrixRequest request) { return io.grpc.stub.ClientCalls.blockingServerStreamingCall( getChannel(), getComputeRouteMatrixMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do ListenableFuture-style rpc calls to service Routes. * * <pre> * The Routes API. * </pre> */ public static final class RoutesFutureStub extends io.grpc.stub.AbstractFutureStub<RoutesFutureStub> { private RoutesFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected RoutesFutureStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new RoutesFutureStub(channel, callOptions); } /** * * * <pre> * Returns the primary route along with optional alternate routes, given a set * of terminal and intermediate waypoints. * **NOTE:** This method requires that you specify a response field mask in * the input. You can provide the response field mask by using URL parameter * `$fields` or `fields`, or by using an HTTP/gRPC header `X-Goog-FieldMask` * (see the [available URL parameters and * headers](https://cloud.google.com/apis/docs/system-parameters)). The value * is a comma separated list of field paths. 
See detailed documentation about * [how to construct the field * paths](https://github.com/protocolbuffers/protobuf/blob/master/src/google/protobuf/field_mask.proto). * For example, in this method: * * Field mask of all available fields (for manual inspection): * `X-Goog-FieldMask: *` * * Field mask of Route-level duration, distance, and polyline (an example * production setup): * `X-Goog-FieldMask: * routes.duration,routes.distanceMeters,routes.polyline.encodedPolyline` * Google discourage the use of the wildcard (`*`) response field mask, or * specifying the field mask at the top level (`routes`), because: * * Selecting only the fields that you need helps our server save computation * cycles, allowing us to return the result to you with a lower latency. * * Selecting only the fields that you need * in your production job ensures stable latency performance. We might add * more response fields in the future, and those new fields might require * extra computation time. If you select all fields, or if you select all * fields at the top level, then you might experience performance degradation * because any new field we add will be automatically included in the * response. * * Selecting only the fields that you need results in a smaller response * size, and thus higher network throughput. 
* </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.maps.routing.v2.ComputeRoutesResponse> computeRoutes(com.google.maps.routing.v2.ComputeRoutesRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getComputeRoutesMethod(), getCallOptions()), request); } } private static final int METHODID_COMPUTE_ROUTES = 0; private static final int METHODID_COMPUTE_ROUTE_MATRIX = 1; private static final class MethodHandlers<Req, Resp> implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>, io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> { private final AsyncService serviceImpl; private final int methodId; MethodHandlers(AsyncService serviceImpl, int methodId) { this.serviceImpl = serviceImpl; this.methodId = methodId; } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { case METHODID_COMPUTE_ROUTES: serviceImpl.computeRoutes( (com.google.maps.routing.v2.ComputeRoutesRequest) request, (io.grpc.stub.StreamObserver<com.google.maps.routing.v2.ComputeRoutesResponse>) responseObserver); break; case METHODID_COMPUTE_ROUTE_MATRIX: serviceImpl.computeRouteMatrix( (com.google.maps.routing.v2.ComputeRouteMatrixRequest) request, (io.grpc.stub.StreamObserver<com.google.maps.routing.v2.RouteMatrixElement>) responseObserver); break; default: throw new AssertionError(); } } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public io.grpc.stub.StreamObserver<Req> invoke( io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { default: throw new AssertionError(); } } } public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) { return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor()) .addMethod( getComputeRoutesMethod(), 
io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.maps.routing.v2.ComputeRoutesRequest, com.google.maps.routing.v2.ComputeRoutesResponse>( service, METHODID_COMPUTE_ROUTES))) .addMethod( getComputeRouteMatrixMethod(), io.grpc.stub.ServerCalls.asyncServerStreamingCall( new MethodHandlers< com.google.maps.routing.v2.ComputeRouteMatrixRequest, com.google.maps.routing.v2.RouteMatrixElement>( service, METHODID_COMPUTE_ROUTE_MATRIX))) .build(); } private abstract static class RoutesBaseDescriptorSupplier implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier { RoutesBaseDescriptorSupplier() {} @java.lang.Override public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() { return com.google.maps.routing.v2.RoutesServiceProto.getDescriptor(); } @java.lang.Override public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() { return getFileDescriptor().findServiceByName("Routes"); } } private static final class RoutesFileDescriptorSupplier extends RoutesBaseDescriptorSupplier { RoutesFileDescriptorSupplier() {} } private static final class RoutesMethodDescriptorSupplier extends RoutesBaseDescriptorSupplier implements io.grpc.protobuf.ProtoMethodDescriptorSupplier { private final java.lang.String methodName; RoutesMethodDescriptorSupplier(java.lang.String methodName) { this.methodName = methodName; } @java.lang.Override public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() { return getServiceDescriptor().findMethodByName(methodName); } } private static volatile io.grpc.ServiceDescriptor serviceDescriptor; public static io.grpc.ServiceDescriptor getServiceDescriptor() { io.grpc.ServiceDescriptor result = serviceDescriptor; if (result == null) { synchronized (RoutesGrpc.class) { result = serviceDescriptor; if (result == null) { serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME) .setSchemaDescriptor(new 
RoutesFileDescriptorSupplier()) .addMethod(getComputeRoutesMethod()) .addMethod(getComputeRouteMatrixMethod()) .build(); } } } return result; } }
googleapis/google-cloud-java
37,247
java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchReadTensorboardTimeSeriesDataRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1beta1/tensorboard_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1beta1; /** * * * <pre> * Request message for * [TensorboardService.BatchReadTensorboardTimeSeriesData][google.cloud.aiplatform.v1beta1.TensorboardService.BatchReadTensorboardTimeSeriesData]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest} */ public final class BatchReadTensorboardTimeSeriesDataRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest) BatchReadTensorboardTimeSeriesDataRequestOrBuilder { private static final long serialVersionUID = 0L; // Use BatchReadTensorboardTimeSeriesDataRequest.newBuilder() to construct. 
private BatchReadTensorboardTimeSeriesDataRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private BatchReadTensorboardTimeSeriesDataRequest() { tensorboard_ = ""; timeSeries_ = com.google.protobuf.LazyStringArrayList.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new BatchReadTensorboardTimeSeriesDataRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.TensorboardServiceProto .internal_static_google_cloud_aiplatform_v1beta1_BatchReadTensorboardTimeSeriesDataRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.TensorboardServiceProto .internal_static_google_cloud_aiplatform_v1beta1_BatchReadTensorboardTimeSeriesDataRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest.class, com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest.Builder .class); } public static final int TENSORBOARD_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object tensorboard_ = ""; /** * * * <pre> * Required. The resource name of the Tensorboard containing * TensorboardTimeSeries to read data from. Format: * `projects/{project}/locations/{location}/tensorboards/{tensorboard}`. * The TensorboardTimeSeries referenced by * [time_series][google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest.time_series] * must be sub resources of this Tensorboard. * </pre> * * <code> * string tensorboard = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The tensorboard. 
*/ @java.lang.Override public java.lang.String getTensorboard() { java.lang.Object ref = tensorboard_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); tensorboard_ = s; return s; } } /** * * * <pre> * Required. The resource name of the Tensorboard containing * TensorboardTimeSeries to read data from. Format: * `projects/{project}/locations/{location}/tensorboards/{tensorboard}`. * The TensorboardTimeSeries referenced by * [time_series][google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest.time_series] * must be sub resources of this Tensorboard. * </pre> * * <code> * string tensorboard = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for tensorboard. */ @java.lang.Override public com.google.protobuf.ByteString getTensorboardBytes() { java.lang.Object ref = tensorboard_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); tensorboard_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int TIME_SERIES_FIELD_NUMBER = 2; @SuppressWarnings("serial") private com.google.protobuf.LazyStringArrayList timeSeries_ = com.google.protobuf.LazyStringArrayList.emptyList(); /** * * * <pre> * Required. The resource names of the TensorboardTimeSeries to read data * from. Format: * `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}` * </pre> * * <code> * repeated string time_series = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return A list containing the timeSeries. */ public com.google.protobuf.ProtocolStringList getTimeSeriesList() { return timeSeries_; } /** * * * <pre> * Required. 
The resource names of the TensorboardTimeSeries to read data * from. Format: * `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}` * </pre> * * <code> * repeated string time_series = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The count of timeSeries. */ public int getTimeSeriesCount() { return timeSeries_.size(); } /** * * * <pre> * Required. The resource names of the TensorboardTimeSeries to read data * from. Format: * `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}` * </pre> * * <code> * repeated string time_series = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param index The index of the element to return. * @return The timeSeries at the given index. */ public java.lang.String getTimeSeries(int index) { return timeSeries_.get(index); } /** * * * <pre> * Required. The resource names of the TensorboardTimeSeries to read data * from. Format: * `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}` * </pre> * * <code> * repeated string time_series = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param index The index of the value to return. * @return The bytes of the timeSeries at the given index. 
*/ public com.google.protobuf.ByteString getTimeSeriesBytes(int index) { return timeSeries_.getByteString(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(tensorboard_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, tensorboard_); } for (int i = 0; i < timeSeries_.size(); i++) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, timeSeries_.getRaw(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(tensorboard_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, tensorboard_); } { int dataSize = 0; for (int i = 0; i < timeSeries_.size(); i++) { dataSize += computeStringSizeNoTag(timeSeries_.getRaw(i)); } size += dataSize; size += 1 * getTimeSeriesList().size(); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest)) { return super.equals(obj); } com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest other = (com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest) obj; if (!getTensorboard().equals(other.getTensorboard())) return false; if (!getTimeSeriesList().equals(other.getTimeSeriesList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return 
true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + TENSORBOARD_FIELD_NUMBER; hash = (53 * hash) + getTensorboard().hashCode(); if (getTimeSeriesCount() > 0) { hash = (37 * hash) + TIME_SERIES_FIELD_NUMBER; hash = (53 * hash) + getTimeSeriesList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest parseFrom(com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { 
return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for * [TensorboardService.BatchReadTensorboardTimeSeriesData][google.cloud.aiplatform.v1beta1.TensorboardService.BatchReadTensorboardTimeSeriesData]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest) com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.TensorboardServiceProto .internal_static_google_cloud_aiplatform_v1beta1_BatchReadTensorboardTimeSeriesDataRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.TensorboardServiceProto .internal_static_google_cloud_aiplatform_v1beta1_BatchReadTensorboardTimeSeriesDataRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest.class, com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest.Builder .class); } // Construct using // 
// com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest.newBuilder()
private Builder() {}

private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
}

// Resets both fields to their defaults and clears the presence bits.
@java.lang.Override
public Builder clear() {
  super.clear();
  bitField0_ = 0;
  tensorboard_ = "";
  timeSeries_ = com.google.protobuf.LazyStringArrayList.emptyList();
  return this;
}

@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  return com.google.cloud.aiplatform.v1beta1.TensorboardServiceProto
      .internal_static_google_cloud_aiplatform_v1beta1_BatchReadTensorboardTimeSeriesDataRequest_descriptor;
}

@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest
    getDefaultInstanceForType() {
  return com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest
      .getDefaultInstance();
}

// Builds the message; throws if a required field is unset (none are, for proto3).
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest build() {
  com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest result =
      buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}

@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest
    buildPartial() {
  com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest result =
      new com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest(this);
  if (bitField0_ != 0) {
    buildPartial0(result);
  }
  onBuilt();
  return result;
}

// Copies only the fields whose presence bits are set into the result.
private void buildPartial0(
    com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest result) {
  int from_bitField0_ = bitField0_;
  if (((from_bitField0_ & 0x00000001) != 0)) {
    result.tensorboard_ = tensorboard_;
  }
  if (((from_bitField0_ & 0x00000002) != 0)) {
    // Freeze the list so the built message is immutable even though the
    // builder and message share the same backing list.
    timeSeries_.makeImmutable();
    result.timeSeries_ = timeSeries_;
  }
}

@java.lang.Override
public Builder clone() {
  return super.clone();
}

@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}

@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}

@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}

@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}

@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}

@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other
      instanceof com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest) {
    return mergeFrom(
        (com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}

// Field-wise merge: a non-empty tensorboard in `other` overwrites ours;
// other's time_series entries are appended to ours.
public Builder mergeFrom(
    com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest other) {
  if (other
      == com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest
          .getDefaultInstance()) return this;
  if (!other.getTensorboard().isEmpty()) {
    tensorboard_ = other.tensorboard_;
    bitField0_ |= 0x00000001;
    onChanged();
  }
  if (!other.timeSeries_.isEmpty()) {
    if (timeSeries_.isEmpty()) {
      // Our list is empty: adopt other's (already immutable) list wholesale.
      timeSeries_ = other.timeSeries_;
      bitField0_ |= 0x00000002;
    } else {
      ensureTimeSeriesIsMutable();
      timeSeries_.addAll(other.timeSeries_);
    }
    onChanged();
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}

@java.lang.Override
public final boolean isInitialized() {
  return true;
}

// Wire-format parse loop: tag 10 = `tensorboard` (field 1, string),
// tag 18 = `time_series` (field 2, repeated string).
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10:
          {
            tensorboard_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000001;
            break;
          } // case 10
        case 18:
          {
            java.lang.String s = input.readStringRequireUtf8();
            ensureTimeSeriesIsMutable();
            timeSeries_.add(s);
            break;
          } // case 18
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    onChanged();
  } // finally
  return this;
}

private int bitField0_;

// Either a String or a ByteString; lazily converted and cached on access.
private java.lang.Object tensorboard_ = "";

/**
 * Required. The resource name of the Tensorboard containing
 * TensorboardTimeSeries to read data from. Format:
 * {@code projects/{project}/locations/{location}/tensorboards/{tensorboard}}.
 *
 * @return The tensorboard.
 */
public java.lang.String getTensorboard() {
  java.lang.Object ref = tensorboard_;
  if (!(ref instanceof java.lang.String)) {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String so subsequent calls skip the UTF-8 decode.
    tensorboard_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}

/**
 * Bytes view of the required {@code tensorboard} field; caches the UTF-8 encoding.
 *
 * @return The bytes for tensorboard.
 */
public com.google.protobuf.ByteString getTensorboardBytes() {
  java.lang.Object ref = tensorboard_;
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    tensorboard_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

/**
 * Sets the required {@code tensorboard} resource name.
 *
 * @param value The tensorboard to set.
 * @return This builder for chaining.
 */
public Builder setTensorboard(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  tensorboard_ = value;
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}

/**
 * Clears {@code tensorboard} back to its default (empty string).
 *
 * @return This builder for chaining.
 */
public Builder clearTensorboard() {
  tensorboard_ = getDefaultInstance().getTensorboard();
  bitField0_ = (bitField0_ & ~0x00000001);
  onChanged();
  return this;
}

/**
 * Sets {@code tensorboard} from a UTF-8 encoded ByteString (validated).
 *
 * @param value The bytes for tensorboard to set.
 * @return This builder for chaining.
 */
public Builder setTensorboardBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  tensorboard_ = value;
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}

private com.google.protobuf.LazyStringArrayList timeSeries_ =
    com.google.protobuf.LazyStringArrayList.emptyList();

// Copy-on-write: replaces the (possibly shared/immutable) list with a
// mutable copy before any mutation, and marks the field present.
private void ensureTimeSeriesIsMutable() {
  if (!timeSeries_.isModifiable()) {
    timeSeries_ = new com.google.protobuf.LazyStringArrayList(timeSeries_);
  }
  bitField0_ |= 0x00000002;
}

/**
 * Required. The resource names of the TensorboardTimeSeries to read data from.
 * Format:
 * {@code projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}}
 *
 * @return A list containing the timeSeries.
 */
public com.google.protobuf.ProtocolStringList getTimeSeriesList() {
  // Freeze before exposing so the caller cannot mutate builder state.
  timeSeries_.makeImmutable();
  return timeSeries_;
}

/**
 * @return The count of timeSeries.
 */
public int getTimeSeriesCount() {
  return timeSeries_.size();
}

/**
 * @param index The index of the element to return.
 * @return The timeSeries at the given index.
 */
public java.lang.String getTimeSeries(int index) {
  return timeSeries_.get(index);
}

/**
 * @param index The index of the value to return.
 * @return The bytes of the timeSeries at the given index.
 */
public com.google.protobuf.ByteString getTimeSeriesBytes(int index) {
  return timeSeries_.getByteString(index);
}

/**
 * Replaces the timeSeries entry at {@code index}.
 *
 * @param index The index to set the value at.
 * @param value The timeSeries to set.
 * @return This builder for chaining.
 */
public Builder setTimeSeries(int index, java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  ensureTimeSeriesIsMutable();
  timeSeries_.set(index, value);
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}

/**
 * Appends one timeSeries resource name.
 *
 * @param value The timeSeries to add.
 * @return This builder for chaining.
 */
public Builder addTimeSeries(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  ensureTimeSeriesIsMutable();
  timeSeries_.add(value);
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}

/**
 * Appends every element of {@code values} (null elements rejected by addAll).
 *
 * @param values The timeSeries to add.
 * @return This builder for chaining.
 */
public Builder addAllTimeSeries(java.lang.Iterable<java.lang.String> values) {
  ensureTimeSeriesIsMutable();
  com.google.protobuf.AbstractMessageLite.Builder.addAll(values, timeSeries_);
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}

/**
 * Clears the repeated {@code time_series} field.
 *
 * @return This builder for chaining.
 */
public Builder clearTimeSeries() {
  timeSeries_ = com.google.protobuf.LazyStringArrayList.emptyList();
  bitField0_ = (bitField0_ & ~0x00000002);
  ; // generator artifact: harmless empty statement, kept verbatim
  onChanged();
  return this;
}

/**
 * Appends one timeSeries as a UTF-8 encoded ByteString (validated).
 *
 * @param value The bytes of the timeSeries to add.
 * @return This builder for chaining.
 */
public Builder addTimeSeriesBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  ensureTimeSeriesIsMutable();
  timeSeries_.add(value);
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}

@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}

// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest)
}

// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest)
private static final com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest
    DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE =
      new com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest();
}

public static com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest
    getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Parser delegates to the builder's mergeFrom and converts failures into
// InvalidProtocolBufferException carrying the partially-built message.
private static final com.google.protobuf.Parser<BatchReadTensorboardTimeSeriesDataRequest> PARSER =
    new com.google.protobuf.AbstractParser<BatchReadTensorboardTimeSeriesDataRequest>() {
      @java.lang.Override
      public BatchReadTensorboardTimeSeriesDataRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException()
              .setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<BatchReadTensorboardTimeSeriesDataRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<BatchReadTensorboardTimeSeriesDataRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.BatchReadTensorboardTimeSeriesDataRequest
    getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
apache/hadoop
36,556
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestProcfsBasedProcessTree.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.util; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.List; import java.util.Random; import java.util.Vector; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Timeout; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.commons.io.FileUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileContext; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; import org.apache.hadoop.util.Shell; import org.apache.hadoop.util.Shell.ExitCodeException; import org.apache.hadoop.util.Shell.ShellCommandExecutor; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.util.ProcfsBasedProcessTree.MemInfo; import org.apache.hadoop.yarn.util.ProcfsBasedProcessTree.ProcessSmapMemoryInfo; import 
org.apache.hadoop.yarn.util.ProcfsBasedProcessTree.ProcessTreeSmapMemInfo;

import static org.apache.hadoop.yarn.util.ProcfsBasedProcessTree.KB_TO_BYTES;
import static org.apache.hadoop.yarn.util.ResourceCalculatorProcessTree.UNAVAILABLE;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import static org.junit.jupiter.api.Assumptions.assumeTrue;

/**
 * A JUnit test to test ProcfsBasedProcessTree.
 */
public class TestProcfsBasedProcessTree {

  private static final Logger LOG =
      LoggerFactory.getLogger(TestProcfsBasedProcessTree.class);
  protected static File TEST_ROOT_DIR = new File("target",
      TestProcfsBasedProcessTree.class.getName() + "-localDir");

  private ShellCommandExecutor shexec = null;
  // Paths written by the rogue shell script: its own pid, the pid of its
  // deepest descendant, and the pid of a backgrounded sleep that gets
  // re-parented to init ("lost").
  private String pidFile, lowestDescendant, lostDescendant;
  private String shellScript;
  private static final int N = 6; // Controls the RogueTask

  // Controls the RogueTask: runs the generated shell script (optionally in
  // its own session via setsid) and records its exit code.
  private class RogueTaskThread extends Thread {
    public void run() {
      try {
        Vector<String> args = new Vector<String>();
        if (isSetsidAvailable()) {
          args.add("setsid");
        }
        args.add("bash");
        args.add("-c");
        args.add(" echo $$ > " + pidFile + "; sh " + shellScript + " " + N + ";");
        shexec = new ShellCommandExecutor(args.toArray(new String[0]));
        shexec.execute();
      } catch (ExitCodeException ee) {
        // Expected: the test kills the subprocess tree deliberately.
        LOG.info("Shell Command exit with a non-zero exit code. This is"
            + " expected as we are killing the subprocesses of the"
            + " task intentionally. " + ee);
      } catch (IOException ioe) {
        LOG.info("Error executing shell command " + ioe);
      } finally {
        LOG.info("Exit code: " + shexec.getExitCode());
      }
    }
  }

  // Polls until the rogue task has written its pid file, then returns the pid.
  // NOTE(review): spins forever if the task never starts; the @Timeout on the
  // test is the only backstop.
  private String getRogueTaskPID() {
    File f = new File(pidFile);
    while (!f.exists()) {
      try {
        Thread.sleep(500);
      } catch (InterruptedException ie) {
        break;
      }
    }
    // read from pidFile
    return getPidFromPidFile(pidFile);
  }

  @BeforeEach
  public void setup() throws IOException {
    // procfs only exists on Linux; skip (not fail) elsewhere.
    assumeTrue(Shell.LINUX);
    FileContext.getLocalFSFileContext().delete(
        new Path(TEST_ROOT_DIR.getAbsolutePath()), true);
  }

  // End-to-end test: spawn a recursive shell process tree, verify the
  // ProcfsBasedProcessTree tracks all N+1 processes (including one orphaned
  // to init), then destroy the tree and verify everything is gone.
  // NOTE(review): JUnit 5 @Timeout defaults to SECONDS; 30000 here looks like
  // a JUnit4 timeout-in-millis carried over — confirm intended (8+ hours).
  @Test
  @Timeout(30000)
  void testProcessTree() throws Exception {
    try {
      assertTrue(ProcfsBasedProcessTree.isAvailable());
    } catch (Exception e) {
      LOG.info(StringUtils.stringifyException(e));
      assertTrue(false, "ProcfsBaseProcessTree should be available on Linux");
      return;
    }
    // create shell script
    Random rm = new Random();
    File tempFile = new File(TEST_ROOT_DIR,
        getClass().getName() + "_shellScript_" + rm.nextInt() + ".sh");
    tempFile.deleteOnExit();
    shellScript = TEST_ROOT_DIR + File.separator + tempFile.getName();

    // create pid file
    tempFile = new File(TEST_ROOT_DIR,
        getClass().getName() + "_pidFile_" + rm.nextInt() + ".pid");
    tempFile.deleteOnExit();
    pidFile = TEST_ROOT_DIR + File.separator + tempFile.getName();

    lowestDescendant = TEST_ROOT_DIR + File.separator + "lowestDescendantPidFile";
    lostDescendant = TEST_ROOT_DIR + File.separator + "lostDescendantPidFile";

    // write to shell-script: recurses N times; the deepest level records its
    // pid, forks a background sleep (which becomes the "lost" descendant),
    // then spins so the tree stays alive.
    File file = new File(shellScript);
    FileUtils.writeStringToFile(file,
        "# rogue task\n" + "sleep 1\n" + "echo hello\n"
            + "if [ $1 -ne 0 ]\n" + "then\n"
            + " sh " + shellScript + " $(($1-1))\n" + "else\n"
            + " echo $$ > " + lowestDescendant + "\n"
            + "(sleep 300&\n"
            + "echo $! > " + lostDescendant + ")\n"
            + " while true\n do\n" + " sleep 5\n" + " done\n" + "fi",
        StandardCharsets.UTF_8);

    Thread t = new RogueTaskThread();
    t.start();
    String pid = getRogueTaskPID();
    LOG.info("Root process pid: " + pid);
    ProcfsBasedProcessTree p = createProcessTree(pid);
    p.updateProcessTree(); // initialize
    LOG.info("ProcessTree: " + p);

    File leaf = new File(lowestDescendant);
    // wait till lowest descendant process of Rougue Task starts execution
    while (!leaf.exists()) {
      try {
        Thread.sleep(500);
      } catch (InterruptedException ie) {
        break;
      }
    }

    p.updateProcessTree(); // reconstruct
    LOG.info("ProcessTree: " + p);

    // Verify the orphaned pid is In process tree
    String lostpid = getPidFromPidFile(lostDescendant);
    LOG.info("Orphaned pid: " + lostpid);
    assertTrue(p.contains(lostpid),
        "Child process owned by init escaped process tree.");

    // Get the process-tree dump
    String processTreeDump = p.getProcessTreeDump();

    // destroy the process and all its subprocesses
    destroyProcessTree(pid);

    boolean isAlive = true;
    // Poll up to ~10s for the tree (or just the root, without setsid) to die.
    for (int tries = 100; tries > 0; tries--) {
      if (isSetsidAvailable()) {// whole processtree
        isAlive = isAnyProcessInTreeAlive(p);
      } else {// process
        isAlive = isAlive(pid);
      }
      if (!isAlive) {
        break;
      }
      Thread.sleep(100);
    }
    if (isAlive) {
      fail("ProcessTree shouldn't be alive");
    }

    LOG.info("Process-tree dump follows: \n" + processTreeDump);
    assertTrue(processTreeDump.startsWith("\t|- PID PPID PGRPID SESSID CMD_NAME "
        + "USER_MODE_TIME(MILLIS) SYSTEM_TIME(MILLIS) VMEM_USAGE(BYTES) "
        + "RSSMEM_USAGE(PAGES) FULL_CMD_LINE\n"),
        "Process-tree dump doesn't start with a proper header");
    // Each recursion depth 0..N must appear as a dump line.
    for (int i = N; i >= 0; i--) {
      String cmdLineDump = "\\|- [0-9]+ [0-9]+ [0-9]+ [0-9]+ \\(sh\\)"
          + " [0-9]+ [0-9]+ [0-9]+ [0-9]+ sh " + shellScript + " " + i;
      Pattern pat = Pattern.compile(cmdLineDump);
      Matcher mat = pat.matcher(processTreeDump);
      assertTrue(mat.find(),
          "Process-tree dump doesn't contain the cmdLineDump of " + i
              + "th process!");
    }

    // Not able to join thread sometimes when forking with large N.
    try {
      t.join(2000);
      LOG.info("RogueTaskThread successfully joined.");
    } catch (InterruptedException ie) {
      LOG.info("Interrupted while joining RogueTaskThread.");
    }

    // ProcessTree is gone now. Any further calls should be sane.
    p.updateProcessTree();
    assertFalse(isAlive(pid), "ProcessTree must have been gone");
    assertTrue(
        p.getVirtualMemorySize() == UNAVAILABLE,
        "vmem for the gone-process is " + p.getVirtualMemorySize()
            + " . It should be UNAVAILABLE(-1).");
    assertEquals("[ ]", p.toString());
  }

  // Overridable factory hooks so subclasses can substitute a different
  // ResourceCalculatorProcessTree implementation.
  protected ProcfsBasedProcessTree createProcessTree(String pid) {
    return new ProcfsBasedProcessTree(pid);
  }

  protected ProcfsBasedProcessTree createProcessTree(String pid,
      String procfsRootDir, Clock clock) {
    return new ProcfsBasedProcessTree(pid, procfsRootDir, clock);
  }

  // SIGKILL the whole process group rooted at pid (negative pid = group).
  protected void destroyProcessTree(String pid) throws IOException {
    sendSignal("-"+pid, 9);
  }

  /**
   * Get PID from a pid-file.
   *
   * @param pidFileName
   *          Name of the pid-file.
   * @return the PID string read from the pid-file. Returns null if the
   *         pidFileName points to a non-existing file or if read fails from the
   *         file.
*/ public static String getPidFromPidFile(String pidFileName) { BufferedReader pidFile = null; FileReader fReader = null; String pid = null; try { fReader = new FileReader(pidFileName); pidFile = new BufferedReader(fReader); } catch (FileNotFoundException f) { LOG.debug("PidFile doesn't exist : {}", pidFileName); return pid; } try { pid = pidFile.readLine(); } catch (IOException i) { LOG.error("Failed to read from " + pidFileName); } finally { try { if (fReader != null) { fReader.close(); } try { if (pidFile != null) { pidFile.close(); } } catch (IOException i) { LOG.warn("Error closing the stream " + pidFile); } } catch (IOException i) { LOG.warn("Error closing the stream " + fReader); } } return pid; } public static class ProcessStatInfo { // sample stat in a single line : 3910 (gpm) S 1 3910 3910 0 -1 4194624 // 83 0 0 0 0 0 0 0 16 0 1 0 7852 2408448 88 4294967295 134512640 // 134590050 3220521392 3220520036 10975138 0 0 4096 134234626 // 4294967295 0 0 17 1 0 0 String pid; String name; String ppid; String pgrpId; String session; String vmem = "0"; String rssmemPage = "0"; String utime = "0"; String stime = "0"; public ProcessStatInfo(String[] statEntries) { pid = statEntries[0]; name = statEntries[1]; ppid = statEntries[2]; pgrpId = statEntries[3]; session = statEntries[4]; vmem = statEntries[5]; if (statEntries.length > 6) { rssmemPage = statEntries[6]; } if (statEntries.length > 7) { utime = statEntries[7]; stime = statEntries[8]; } } // construct a line that mimics the procfs stat file. // all unused numerical entries are set to 0. 
    // Renders the fields back into a /proc/<pid>/stat-shaped line; every
    // entry the tests do not care about is a literal 0.
    public String getStatLine() {
      return String.format("%s (%s) S %s %s %s 0 0 0"
          + " 0 0 0 0 %s %s 0 0 0 0 0 0 0 %s %s 0 0"
          + " 0 0 0 0 0 0 0 0"
          + " 0 0 0 0 0", pid, name, ppid, pgrpId, session,
          utime, stime, vmem, rssmemPage);
    }
  }

  // Builds one smaps mapping record; `entries` supplies the 13 KB-valued
  // fields in the fixed order listed below (SIZE .. MMU_PAGE_SIZE).
  public ProcessSmapMemoryInfo constructMemoryMappingInfo(String address,
      String[] entries) {
    ProcessSmapMemoryInfo info = new ProcessSmapMemoryInfo(address);
    info.setMemInfo(MemInfo.SIZE.name(), entries[0]);
    info.setMemInfo(MemInfo.RSS.name(), entries[1]);
    info.setMemInfo(MemInfo.PSS.name(), entries[2]);
    info.setMemInfo(MemInfo.SHARED_CLEAN.name(), entries[3]);
    info.setMemInfo(MemInfo.SHARED_DIRTY.name(), entries[4]);
    info.setMemInfo(MemInfo.PRIVATE_CLEAN.name(), entries[5]);
    info.setMemInfo(MemInfo.PRIVATE_DIRTY.name(), entries[6]);
    info.setMemInfo(MemInfo.REFERENCED.name(), entries[7]);
    info.setMemInfo(MemInfo.ANONYMOUS.name(), entries[8]);
    info.setMemInfo(MemInfo.ANON_HUGE_PAGES.name(), entries[9]);
    info.setMemInfo(MemInfo.SWAP.name(), entries[10]);
    info.setMemInfo(MemInfo.KERNEL_PAGE_SIZE.name(), entries[11]);
    info.setMemInfo(MemInfo.MMU_PAGE_SIZE.name(), entries[12]);
    return info;
  }

  // Populates each fake process with the same four smaps mappings.
  public void createMemoryMappingInfo(ProcessTreeSmapMemInfo[] procMemInfo) {
    for (int i = 0; i < procMemInfo.length; i++) {
      // Construct 4 memory mappings per process.
// As per min(Shared_Dirty, Pss) + Private_Clean + Private_Dirty // and not including r--s, r-xs, we should get 100 KB per process List<ProcessSmapMemoryInfo> memoryMappingList = procMemInfo[i].getMemoryInfoList(); memoryMappingList.add(constructMemoryMappingInfo( "7f56c177c000-7f56c177d000 " + "rw-p 00010000 08:02 40371558 " + "/grid/0/jdk1.7.0_25/jre/lib/amd64/libnio.so", // Format: size, rss, pss, shared_clean, shared_dirty, private_clean // private_dirty, referenced, anon, anon-huge-pages, swap, // kernel_page_size, mmu_page_size new String[] {"4", "4", "25", "4", "25", "15", "10", "4", "10", "0", "0", "4", "4"})); memoryMappingList.add(constructMemoryMappingInfo( "7fb09382e000-7fb09382f000 r--s 00003000 " + "08:02 25953545", new String[] {"4", "4", "25", "4", "0", "15", "10", "4", "10", "0", "0", "4", "4"})); memoryMappingList.add(constructMemoryMappingInfo( "7e8790000-7e8b80000 r-xs 00000000 00:00 0", new String[] {"4", "4", "25", "4", "0", "15", "10", "4", "10", "0", "0", "4", "4"})); memoryMappingList.add(constructMemoryMappingInfo( "7da677000-7e0dcf000 rw-p 00000000 00:00 0", new String[] {"4", "4", "25", "4", "50", "15", "10", "4", "10", "0", "0", "4", "4"})); } } /** * A basic test that creates a few process directories and writes stat files. * Verifies that the cpu time and memory is correctly computed. * * @throws IOException * if there was a problem setting up the fake procfs directories or * files. */ @Test @Timeout(30000) void testCpuAndMemoryForProcessTree() throws IOException { // test processes String[] pids = {"100", "200", "300", "400"}; ControlledClock testClock = new ControlledClock(); testClock.setTime(0); // create the fake procfs root directory. File procfsRootDir = new File(TEST_ROOT_DIR, "proc"); try { setupProcfsRootDir(procfsRootDir); setupPidDirs(procfsRootDir, pids); // create stat objects. // assuming processes 100, 200, 300 are in tree and 400 is not. 
    // stat entry order: pid, name, ppid, pgrpid, session, vmem,
    // rss-pages, utime, stime
    ProcessStatInfo[] procInfos = new ProcessStatInfo[4];
    procInfos[0] = new ProcessStatInfo(new String[]{"100", "proc1", "1",
        "100", "100", "100000", "100", "1000", "200"});
    procInfos[1] = new ProcessStatInfo(new String[]{"200", "process two",
        "100", "100", "100", "200000", "200", "2000", "400"});
    procInfos[2] = new ProcessStatInfo(new String[]{"300", "proc(3)", "200",
        "100", "100", "300000", "300", "3000", "600"});
    procInfos[3] = new ProcessStatInfo(new String[]{"400", "proc4", "1",
        "400", "400", "400000", "400", "4000", "800"});

    ProcessTreeSmapMemInfo[] memInfo = new ProcessTreeSmapMemInfo[4];
    memInfo[0] = new ProcessTreeSmapMemInfo("100");
    memInfo[1] = new ProcessTreeSmapMemInfo("200");
    memInfo[2] = new ProcessTreeSmapMemInfo("300");
    memInfo[3] = new ProcessTreeSmapMemInfo("400");
    createMemoryMappingInfo(memInfo);
    writeStatFiles(procfsRootDir, pids, procInfos, memInfo);

    // crank up the process tree class.
    Configuration conf = new Configuration();
    ProcfsBasedProcessTree processTree =
        createProcessTree("100", procfsRootDir.getAbsolutePath(), testClock);
    processTree.setConf(conf);
    // build the process tree.
    processTree.updateProcessTree();

    // verify virtual memory: 100000 + 200000 + 300000 for pids 100/200/300
    // (400 is not in the tree)
    assertEquals(600000L, processTree.getVirtualMemorySize(),
        "Virtual memory does not match");

    // verify rss memory: 100 + 200 + 300 pages
    long cumuRssMem = ProcfsBasedProcessTree.PAGE_SIZE > 0 ?
        600L * ProcfsBasedProcessTree.PAGE_SIZE :
        ResourceCalculatorProcessTree.UNAVAILABLE;
    assertEquals(cumuRssMem, processTree.getRssMemorySize(),
        "rss memory does not match");

    // verify cumulative cpu time:
    // (1000+200) + (2000+400) + (3000+600) = 7200 jiffies
    long cumuCpuTime = ProcfsBasedProcessTree.JIFFY_LENGTH_IN_MILLIS > 0 ?
        7200L * ProcfsBasedProcessTree.JIFFY_LENGTH_IN_MILLIS : 0L;
    assertEquals(cumuCpuTime, processTree.getCumulativeCpuTime(),
        "Cumulative cpu time does not match");

    // verify CPU usage
    assertEquals(-1.0, processTree.getCpuUsagePercent(), 0.01,
        "Percent CPU time should be set to -1 initially");

    // Check by enabling smaps
    setSmapsInProceTree(processTree, true);
    // anon (exclude r-xs,r--s)
    assertEquals((20 * KB_TO_BYTES * 3), processTree.getRssMemorySize(),
        "rss memory does not match");

    // test the cpu time again to see if it cumulates
    procInfos[0] = new ProcessStatInfo(new String[]{"100", "proc1", "1",
        "100", "100", "100000", "100", "2000", "300"});
    procInfos[1] = new ProcessStatInfo(new String[]{"200", "process two",
        "100", "100", "100", "200000", "200", "3000", "500"});
    writeStatFiles(procfsRootDir, pids, procInfos, memInfo);

    long elapsedTimeBetweenUpdatesMsec = 200000;
    testClock.setTime(elapsedTimeBetweenUpdatesMsec);
    // build the process tree.
    processTree.updateProcessTree();

    // verify cumulative cpu time again:
    // (2000+300) + (3000+500) + (3000+600) = 9400 jiffies
    long prevCumuCpuTime = cumuCpuTime;
    cumuCpuTime = ProcfsBasedProcessTree.JIFFY_LENGTH_IN_MILLIS > 0 ?
        9400L * ProcfsBasedProcessTree.JIFFY_LENGTH_IN_MILLIS : 0L;
    assertEquals(cumuCpuTime, processTree.getCumulativeCpuTime(),
        "Cumulative cpu time does not match");

    double expectedCpuUsagePercent =
        (ProcfsBasedProcessTree.JIFFY_LENGTH_IN_MILLIS > 0) ?
            (cumuCpuTime - prevCumuCpuTime) * 100.0 /
                elapsedTimeBetweenUpdatesMsec : 0;
    // expectedCpuUsagePercent is given by (94000L - 72000) * 100/
    // 200000;
    // which in this case is 11. Lets verify that
    assertEquals(11, expectedCpuUsagePercent, 0.001);
    assertEquals(expectedCpuUsagePercent, processTree.getCpuUsagePercent(),
        0.01,
        "Percent CPU time is not correct expected " + expectedCpuUsagePercent);
  } finally {
    FileUtil.fullyDelete(procfsRootDir);
  }
}

/**
 * Turns smaps-based RSS computation on or off in the tree's configuration
 * and refreshes the process tree so the setting takes effect.
 *
 * @param processTree the tree whose configuration is updated.
 * @param enableFlag whether smaps-based RSS should be used.
 */
private void setSmapsInProceTree(ProcfsBasedProcessTree processTree,
    boolean enableFlag) {
  Configuration conf = processTree.getConf();
  if (conf == null) {
    conf = new Configuration();
  }
  conf.setBoolean(YarnConfiguration.PROCFS_USE_SMAPS_BASED_RSS_ENABLED,
      enableFlag);
  processTree.setConf(conf);
  processTree.updateProcessTree();
}

/**
 * Tests that cumulative memory is computed only for processes older than a
 * given age.
 *
 * @throws IOException
 *           if there was a problem setting up the fake procfs directories or
 *           files.
 */
@Test
@Timeout(30000)
void testMemForOlderProcesses() throws IOException {
  // run the scenario with both RSS computation modes
  testMemForOlderProcesses(false);
  testMemForOlderProcesses(true);
}

/**
 * Scenario body for {@link #testMemForOlderProcesses()}.
 *
 * @param smapEnabled whether smaps-based RSS computation is enabled.
 */
private void testMemForOlderProcesses(boolean smapEnabled) throws IOException {
  // initial list of processes
  String[] pids = { "100", "200", "300", "400" };
  // create the fake procfs root directory.
  File procfsRootDir = new File(TEST_ROOT_DIR, "proc");

  try {
    setupProcfsRootDir(procfsRootDir);
    setupPidDirs(procfsRootDir, pids);

    // create stat objects.
    // assuming 100, 200 and 400 are in tree, 300 is not.
    // stat entry order: pid, name, ppid, pgrpid, session, vmem, rss-pages
    ProcessStatInfo[] procInfos = new ProcessStatInfo[4];
    procInfos[0] = new ProcessStatInfo(new String[]{"100", "proc1", "1",
        "100", "100", "100000", "100"});
    procInfos[1] = new ProcessStatInfo(new String[]{"200", "process two",
        "100", "100", "100", "200000", "200"});
    procInfos[2] = new ProcessStatInfo(new String[]{"300", "proc(3)", "1",
        "300", "300", "300000", "300"});
    procInfos[3] = new ProcessStatInfo(new String[]{"400", "proc4", "100",
        "100", "100", "400000", "400"});
    // write smap information invariably for testing
    ProcessTreeSmapMemInfo[] memInfo = new ProcessTreeSmapMemInfo[4];
    memInfo[0] = new ProcessTreeSmapMemInfo("100");
    memInfo[1] = new ProcessTreeSmapMemInfo("200");
    memInfo[2] = new ProcessTreeSmapMemInfo("300");
    memInfo[3] = new ProcessTreeSmapMemInfo("400");
    createMemoryMappingInfo(memInfo);
    writeStatFiles(procfsRootDir, pids, procInfos, memInfo);

    // crank up the process tree class.
    ProcfsBasedProcessTree processTree =
        createProcessTree("100", procfsRootDir.getAbsolutePath(),
            SystemClock.getInstance());
    setSmapsInProceTree(processTree, smapEnabled);

    // verify virtual memory: 100000 + 200000 + 400000 (pids 100, 200, 400)
    assertEquals(700000L, processTree.getVirtualMemorySize(),
        "Virtual memory does not match");

    // write one more process as child of 100.
    String[] newPids = { "500" };
    setupPidDirs(procfsRootDir, newPids);

    ProcessStatInfo[] newProcInfos = new ProcessStatInfo[1];
    newProcInfos[0] = new ProcessStatInfo(new String[] { "500", "proc5",
        "100", "100", "100", "500000", "500" });
    ProcessTreeSmapMemInfo[] newMemInfos = new ProcessTreeSmapMemInfo[1];
    newMemInfos[0] = new ProcessTreeSmapMemInfo("500");
    createMemoryMappingInfo(newMemInfos);
    writeStatFiles(procfsRootDir, newPids, newProcInfos, newMemInfos);

    // check memory includes the new process.
    processTree.updateProcessTree();
    assertEquals(1200000L, processTree.getVirtualMemorySize(),
        "vmem does not include new process");
    if (!smapEnabled) {
      long cumuRssMem = ProcfsBasedProcessTree.PAGE_SIZE > 0 ?
          1200L * ProcfsBasedProcessTree.PAGE_SIZE :
          ResourceCalculatorProcessTree.UNAVAILABLE;
      assertEquals(cumuRssMem, processTree.getRssMemorySize(),
          "rssmem does not include new process");
    } else {
      assertEquals(20 * KB_TO_BYTES * 4, processTree.getRssMemorySize(),
          "rssmem does not include new process");
    }

    // however processes older than 1 iteration will retain the older value
    assertEquals(700000L, processTree.getVirtualMemorySize(1),
        "vmem shouldn't have included new process");
    if (!smapEnabled) {
      long cumuRssMem = ProcfsBasedProcessTree.PAGE_SIZE > 0 ?
          700L * ProcfsBasedProcessTree.PAGE_SIZE :
          ResourceCalculatorProcessTree.UNAVAILABLE;
      assertEquals(cumuRssMem, processTree.getRssMemorySize(1),
          "rssmem shouldn't have included new process");
    } else {
      assertEquals(20 * KB_TO_BYTES * 3, processTree.getRssMemorySize(1),
          "rssmem shouldn't have included new process");
    }

    // one more process
    newPids = new String[] { "600" };
    setupPidDirs(procfsRootDir, newPids);

    newProcInfos = new ProcessStatInfo[1];
    newProcInfos[0] = new ProcessStatInfo(new String[] { "600", "proc6",
        "100", "100", "100", "600000", "600" });
    newMemInfos = new ProcessTreeSmapMemInfo[1];
    newMemInfos[0] = new ProcessTreeSmapMemInfo("600");
    createMemoryMappingInfo(newMemInfos);
    writeStatFiles(procfsRootDir, newPids, newProcInfos, newMemInfos);

    // refresh process tree
    processTree.updateProcessTree();

    // processes older than 2 iterations should be same as before.
    assertEquals(700000L, processTree.getVirtualMemorySize(2),
        "vmem shouldn't have included new processes");
    if (!smapEnabled) {
      long cumuRssMem = ProcfsBasedProcessTree.PAGE_SIZE > 0 ?
          700L * ProcfsBasedProcessTree.PAGE_SIZE :
          ResourceCalculatorProcessTree.UNAVAILABLE;
      assertEquals(cumuRssMem, processTree.getRssMemorySize(2),
          "rssmem shouldn't have included new processes");
    } else {
      assertEquals(20 * KB_TO_BYTES * 3, processTree.getRssMemorySize(2),
          "rssmem shouldn't have included new processes");
    }

    // processes older than 1 iteration should not include new process,
    // but include process 500
    assertEquals(1200000L, processTree.getVirtualMemorySize(1),
        "vmem shouldn't have included new processes");
    if (!smapEnabled) {
      long cumuRssMem = ProcfsBasedProcessTree.PAGE_SIZE > 0 ?
          1200L * ProcfsBasedProcessTree.PAGE_SIZE :
          ResourceCalculatorProcessTree.UNAVAILABLE;
      assertEquals(cumuRssMem, processTree.getRssMemorySize(1),
          "rssmem shouldn't have included new processes");
    } else {
      assertEquals(20 * KB_TO_BYTES * 4, processTree.getRssMemorySize(1),
          "rssmem shouldn't have included new processes");
    }

    // no processes older than 3 iterations
    assertEquals(0, processTree.getVirtualMemorySize(3),
        "Getting non-zero vmem for processes older than 3 iterations");
    assertEquals(0, processTree.getRssMemorySize(3),
        "Getting non-zero rssmem for processes older than 3 iterations");
  } finally {
    FileUtil.fullyDelete(procfsRootDir);
  }
}

/**
 * Verifies ProcfsBasedProcessTree.checkPidPgrpidForMatch() in case of
 * 'constructProcessInfo() returning null' by not writing stat file for the
 * mock process
 *
 * @throws IOException
 *           if there was a problem setting up the fake procfs directories or
 *           files.
 */
@Test
@Timeout(30000)
void testDestroyProcessTree() throws IOException {
  // test process
  String pid = "100";
  // create the fake procfs root directory.
  File procfsRootDir = new File(TEST_ROOT_DIR, "proc");

  try {
    setupProcfsRootDir(procfsRootDir);

    // crank up the process tree class.
    createProcessTree(pid, procfsRootDir.getAbsolutePath(),
        SystemClock.getInstance());

    // Let us not create stat file for pid 100.
    // without a stat file, constructProcessInfo() returns null, and the
    // pid/pgrpid check is expected to pass.
    assertTrue(ProcfsBasedProcessTree.checkPidPgrpidForMatch(pid,
        procfsRootDir.getAbsolutePath()));
  } finally {
    FileUtil.fullyDelete(procfsRootDir);
  }
}

/**
 * Test the correctness of process-tree dump.
 *
 * @throws IOException
 */
@Test
@Timeout(30000)
void testProcessTreeDump() throws IOException {

  String[] pids = {"100", "200", "300", "400", "500", "600"};

  File procfsRootDir = new File(TEST_ROOT_DIR, "proc");
  try {
    setupProcfsRootDir(procfsRootDir);
    setupPidDirs(procfsRootDir, pids);

    int numProcesses = pids.length;
    // Processes 200, 300, 400 and 500 are descendants of 100. 600 is not.
    ProcessStatInfo[] procInfos = new ProcessStatInfo[numProcesses];
    procInfos[0] = new ProcessStatInfo(new String[]{"100", "proc1", "1",
        "100", "100", "100000", "100", "1000", "200"});
    procInfos[1] = new ProcessStatInfo(new String[]{"200", "process two",
        "100", "100", "100", "200000", "200", "2000", "400"});
    procInfos[2] = new ProcessStatInfo(new String[]{"300", "proc(3)", "200",
        "100", "100", "300000", "300", "3000", "600"});
    procInfos[3] = new ProcessStatInfo(new String[]{"400", "proc4", "200",
        "100", "100", "400000", "400", "4000", "800"});
    procInfos[4] = new ProcessStatInfo(new String[]{"500", "proc5", "400",
        "100", "100", "400000", "400", "4000", "800"});
    procInfos[5] = new ProcessStatInfo(new String[]{"600", "proc6", "1",
        "1", "1", "400000", "400", "4000", "800"});

    ProcessTreeSmapMemInfo[] memInfos = new ProcessTreeSmapMemInfo[6];
    memInfos[0] = new ProcessTreeSmapMemInfo("100");
    memInfos[1] = new ProcessTreeSmapMemInfo("200");
    memInfos[2] = new ProcessTreeSmapMemInfo("300");
    memInfos[3] = new ProcessTreeSmapMemInfo("400");
    memInfos[4] = new ProcessTreeSmapMemInfo("500");
    memInfos[5] = new ProcessTreeSmapMemInfo("600");

    String[] cmdLines = new String[numProcesses];
    cmdLines[0] = "proc1 arg1 arg2";
    cmdLines[1] = "process two arg3 arg4";
    cmdLines[2] = "proc(3) arg5 arg6";
    cmdLines[3] = "proc4 arg7 arg8";
    cmdLines[4] = "proc5 arg9 arg10";
    cmdLines[5] = "proc6 arg11 arg12";

    createMemoryMappingInfo(memInfos);
    writeStatFiles(procfsRootDir, pids, procInfos, memInfos);
    writeCmdLineFiles(procfsRootDir, pids, cmdLines);

    ProcfsBasedProcessTree processTree =
        createProcessTree("100", procfsRootDir.getAbsolutePath(),
            SystemClock.getInstance());
    // build the process tree.
    processTree.updateProcessTree();

    // Get the process-tree dump
    String processTreeDump = processTree.getProcessTreeDump();

    LOG.info("Process-tree dump follows: \n" + processTreeDump);
    assertTrue(processTreeDump.startsWith(
        "\t|- PID PPID PGRPID SESSID CMD_NAME " +
            "USER_MODE_TIME(MILLIS) SYSTEM_TIME(MILLIS) VMEM_USAGE(BYTES) " +
            "RSSMEM_USAGE(PAGES) FULL_CMD_LINE\n"),
        "Process-tree dump doesn't start with a proper header");
    // the five in-tree processes (100..500) must all be dumped
    for (int i = 0; i < 5; i++) {
      ProcessStatInfo p = procInfos[i];
      assertTrue(
          processTreeDump.contains("\t|- " + p.pid + " " + p.ppid + " " +
              p.pgrpId + " " + p.session + " (" + p.name + ") " + p.utime +
              " " + p.stime + " " + p.vmem + " " + p.rssmemPage + " " +
              cmdLines[i]),
          "Process-tree dump doesn't contain the cmdLineDump of process " +
              p.pid);
    }

    // 600 should not be in the dump
    ProcessStatInfo p = procInfos[5];
    assertFalse(
        processTreeDump.contains("\t|- " + p.pid + " " + p.ppid + " " +
            p.pgrpId + " " + p.session + " (" + p.name + ") " + p.utime +
            " " + p.stime + " " + p.vmem + " " + cmdLines[5]),
        "Process-tree dump shouldn't contain the cmdLineDump of process " +
            p.pid);
  } finally {
    FileUtil.fullyDelete(procfsRootDir);
  }
}

/**
 * Probes whether the setsid command is usable by running it once.
 *
 * @return true if setsid could be executed, false otherwise.
 */
protected static boolean isSetsidAvailable() {
  ShellCommandExecutor shexec = null;
  boolean setsidSupported = true;
  try {
    String[] args = { "setsid", "bash", "-c", "echo $$" };
    shexec = new ShellCommandExecutor(args);
    shexec.execute();
  } catch (IOException ioe) {
    LOG.warn("setsid is not available on this machine. So not using it.");
    setsidSupported = false;
  } finally {
    // handle the exit code
    // NOTE(review): if the ShellCommandExecutor constructor ever threw,
    // shexec would still be null here and this would NPE — confirm intent.
    LOG.info("setsid exited with exit code " + shexec.getExitCode());
  }
  return setsidSupported;
}

/**
 * Is the root-process alive? Used only in tests.
 *
 * @return true if the root-process is alive, false otherwise.
 */
private static boolean isAlive(String pid) {
  try {
    // with setsid available the whole process group is signalled
    final String sigpid = isSetsidAvailable() ? "-" + pid : pid;
    try {
      // signal 0 only checks for existence, it does not kill
      sendSignal(sigpid, 0);
    } catch (ExitCodeException e) {
      return false;
    }
    return true;
  } catch (IOException ignored) {
    // treat any shell failure as "not alive"
  }
  return false;
}

/**
 * Sends the given signal to the given pid via the kill command.
 *
 * @param pid pid (or "-pgid") to signal.
 * @param signal signal number to send.
 * @throws IOException if the kill command could not be run.
 */
private static void sendSignal(String pid, int signal) throws IOException {
  ShellCommandExecutor shexec = null;
  String[] arg = { "kill", "-" + signal, "--", pid };
  shexec = new ShellCommandExecutor(arg);
  shexec.execute();
}

/**
 * Is any of the subprocesses in the process-tree alive? Used only in tests.
 *
 * @return true if any of the processes in the process-tree is alive, false
 *         otherwise.
 */
private static boolean isAnyProcessInTreeAlive(
    ProcfsBasedProcessTree processTree) {
  for (String pId : processTree.getCurrentProcessIDs()) {
    if (isAlive(pId)) {
      return true;
    }
  }
  return false;
}

/**
 * Create a directory to mimic the procfs file system's root.
 *
 * @param procfsRootDir
 *          root directory to create.
 * @throws IOException
 *           if could not delete the procfs root directory
 */
public static void setupProcfsRootDir(File procfsRootDir) throws IOException {
  // cleanup any existing process root dir.
  if (procfsRootDir.exists()) {
    assertTrue(FileUtil.fullyDelete(procfsRootDir));
  }

  // create afresh
  assertTrue(procfsRootDir.mkdirs());
}

/**
 * Create PID directories under the specified procfs root directory
 *
 * @param procfsRootDir
 *          root directory of procfs file system
 * @param pids
 *          the PID directories to create.
* @throws IOException * If PID dirs could not be created */ public static void setupPidDirs(File procfsRootDir, String[] pids) throws IOException { for (String pid : pids) { File pidDir = new File(procfsRootDir, pid); FileUtils.forceMkdir(pidDir); LOG.info("created pid dir: " + pidDir); } } /** * Write stat files under the specified pid directories with data setup in the * corresponding ProcessStatInfo objects * * @param procfsRootDir * root directory of procfs file system * @param pids * the PID directories under which to create the stat file * @param procs * corresponding ProcessStatInfo objects whose data should be written * to the stat files. * @throws IOException * if stat files could not be written */ public static void writeStatFiles(File procfsRootDir, String[] pids, ProcessStatInfo[] procs, ProcessTreeSmapMemInfo[] smaps) throws IOException { for (int i = 0; i < pids.length; i++) { File statFile = new File(new File(procfsRootDir, pids[i]), ProcfsBasedProcessTree.PROCFS_STAT_FILE); BufferedWriter bw = null; try { FileWriter fw = new FileWriter(statFile); bw = new BufferedWriter(fw); bw.write(procs[i].getStatLine()); LOG.info("wrote stat file for " + pids[i] + " with contents: " + procs[i].getStatLine()); } finally { // not handling exception - will throw an error and fail the test. if (bw != null) { bw.close(); } } if (smaps != null) { File smapFile = new File(new File(procfsRootDir, pids[i]), ProcfsBasedProcessTree.SMAPS); bw = null; try { FileWriter fw = new FileWriter(smapFile); bw = new BufferedWriter(fw); bw.write(smaps[i].toString()); bw.flush(); LOG.info("wrote smap file for " + pids[i] + " with contents: " + smaps[i].toString()); } finally { // not handling exception - will throw an error and fail the test. 
if (bw != null) { bw.close(); } } } } } private static void writeCmdLineFiles(File procfsRootDir, String[] pids, String[] cmdLines) throws IOException { for (int i = 0; i < pids.length; i++) { File statFile = new File(new File(procfsRootDir, pids[i]), ProcfsBasedProcessTree.PROCFS_CMDLINE_FILE); BufferedWriter bw = null; try { bw = new BufferedWriter(new FileWriter(statFile)); bw.write(cmdLines[i]); LOG.info("wrote command-line file for " + pids[i] + " with contents: " + cmdLines[i]); } finally { // not handling exception - will throw an error and fail the test. if (bw != null) { bw.close(); } } } } }
apache/hudi
37,471
hudi-hadoop-common/src/main/java/org/apache/hudi/common/util/AvroOrcUtils.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hudi.common.util; import org.apache.hudi.exception.HoodieIOException; import org.apache.avro.Conversions; import org.apache.avro.LogicalType; import org.apache.avro.LogicalTypes; import org.apache.avro.Schema; import org.apache.avro.Schema.Field; import org.apache.avro.generic.GenericData; import org.apache.avro.generic.GenericData.StringType; import org.apache.avro.generic.GenericRecord; import org.apache.avro.util.Utf8; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ListColumnVector; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; import org.apache.hadoop.hive.ql.exec.vector.MapColumnVector; import org.apache.hadoop.hive.ql.exec.vector.StructColumnVector; import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; import org.apache.hadoop.hive.ql.exec.vector.UnionColumnVector; import org.apache.hadoop.hive.serde2.io.DateWritable; import 
org.apache.orc.TypeDescription; import java.math.BigDecimal; import java.math.BigInteger; import java.nio.ByteBuffer; import java.sql.Timestamp; import java.util.ArrayList; import java.util.Base64; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; import static org.apache.avro.JsonProperties.NULL_VALUE; import static org.apache.hudi.common.util.BinaryUtil.toBytes; import static org.apache.hudi.common.util.StringUtils.getUTF8Bytes; /** * Methods including addToVector, addUnionValue, createOrcSchema are originally from * https://github.com/streamsets/datacollector. * Source classes: * - com.streamsets.pipeline.lib.util.avroorc.AvroToOrcRecordConverter * - com.streamsets.pipeline.lib.util.avroorc.AvroToOrcSchemaConverter * * Changes made: * 1. Flatten nullable Avro schema type when the value is not null in `addToVector`. * 2. Use getLogicalType(), constants from LogicalTypes instead of getJsonProp() to handle Avro logical types. */ public class AvroOrcUtils { private static final int MICROS_PER_MILLI = 1000; private static final int NANOS_PER_MICRO = 1000; /** * Add an object (of a given ORC type) to the column vector at a given position. * * @param type ORC schema of the value Object. * @param colVector The column vector to store the value Object. * @param avroSchema Avro schema of the value Object. * Only used to check logical types for timestamp unit conversion. * @param value Object to be added to the column vector * @param vectorPos The position in the vector where value will be stored at. 
*/ public static void addToVector(TypeDescription type, ColumnVector colVector, Schema avroSchema, Object value, int vectorPos) { final int currentVecLength = colVector.isNull.length; if (vectorPos >= currentVecLength) { colVector.ensureSize(2 * currentVecLength, true); } if (value == null) { colVector.isNull[vectorPos] = true; colVector.noNulls = false; return; } if (avroSchema.getType().equals(Schema.Type.UNION)) { avroSchema = getActualSchemaType(avroSchema); } LogicalType logicalType = avroSchema != null ? avroSchema.getLogicalType() : null; switch (type.getCategory()) { case BOOLEAN: LongColumnVector boolVec = (LongColumnVector) colVector; boolVec.vector[vectorPos] = (boolean) value ? 1 : 0; break; case BYTE: LongColumnVector byteColVec = (LongColumnVector) colVector; byteColVec.vector[vectorPos] = (byte) value; break; case SHORT: LongColumnVector shortColVec = (LongColumnVector) colVector; shortColVec.vector[vectorPos] = (short) value; break; case INT: // the Avro logical type could be AvroTypeUtil.LOGICAL_TYPE_TIME_MILLIS, but we will ignore that fact here // since Orc has no way to represent a time in the way Avro defines it; we will simply preserve the int value LongColumnVector intColVec = (LongColumnVector) colVector; intColVec.vector[vectorPos] = (int) value; break; case LONG: // the Avro logical type could be AvroTypeUtil.LOGICAL_TYPE_TIME_MICROS, but we will ignore that fact here // since Orc has no way to represent a time in the way Avro defines it; we will simply preserve the long value LongColumnVector longColVec = (LongColumnVector) colVector; longColVec.vector[vectorPos] = (long) value; break; case FLOAT: DoubleColumnVector floatColVec = (DoubleColumnVector) colVector; floatColVec.vector[vectorPos] = (float) value; break; case DOUBLE: DoubleColumnVector doubleColVec = (DoubleColumnVector) colVector; doubleColVec.vector[vectorPos] = (double) value; break; case VARCHAR: case CHAR: case STRING: BytesColumnVector bytesColVec = (BytesColumnVector) 
colVector; byte[] bytes = null; if (value instanceof String) { bytes = getUTF8Bytes((String) value); } else if (value instanceof Utf8) { final Utf8 utf8 = (Utf8) value; bytes = utf8.getBytes(); } else if (value instanceof GenericData.EnumSymbol) { bytes = getUTF8Bytes(value.toString()); } else { throw new IllegalStateException(String.format( "Unrecognized type for Avro %s field value, which has type %s, value %s", type.getCategory().getName(), value.getClass().getName(), value )); } if (bytes == null) { bytesColVec.isNull[vectorPos] = true; bytesColVec.noNulls = false; } else { bytesColVec.setRef(vectorPos, bytes, 0, bytes.length); } break; case DATE: LongColumnVector dateColVec = (LongColumnVector) colVector; int daysSinceEpoch; if (logicalType instanceof LogicalTypes.Date) { daysSinceEpoch = (int) value; } else if (value instanceof java.sql.Date) { daysSinceEpoch = DateWritable.dateToDays((java.sql.Date) value); } else if (value instanceof Date) { daysSinceEpoch = DateWritable.millisToDays(((Date) value).getTime()); } else { throw new IllegalStateException(String.format( "Unrecognized type for Avro DATE field value, which has type %s, value %s", value.getClass().getName(), value )); } dateColVec.vector[vectorPos] = daysSinceEpoch; break; case TIMESTAMP: TimestampColumnVector tsColVec = (TimestampColumnVector) colVector; long time; int nanos = 0; // The unit for Timestamp in ORC is millis, convert timestamp to millis if needed if (logicalType instanceof LogicalTypes.TimestampMillis) { time = (long) value; } else if (logicalType instanceof LogicalTypes.TimestampMicros) { final long logicalTsValue = (long) value; time = logicalTsValue / MICROS_PER_MILLI; nanos = NANOS_PER_MICRO * ((int) (logicalTsValue % MICROS_PER_MILLI)); } else if (value instanceof Timestamp) { Timestamp tsValue = (Timestamp) value; time = tsValue.getTime(); nanos = tsValue.getNanos(); } else if (value instanceof java.sql.Date) { java.sql.Date sqlDateValue = (java.sql.Date) value; time = 
sqlDateValue.getTime(); } else if (value instanceof Date) { Date dateValue = (Date) value; time = dateValue.getTime(); } else { throw new IllegalStateException(String.format( "Unrecognized type for Avro TIMESTAMP field value, which has type %s, value %s", value.getClass().getName(), value )); } tsColVec.time[vectorPos] = time; tsColVec.nanos[vectorPos] = nanos; break; case BINARY: BytesColumnVector binaryColVec = (BytesColumnVector) colVector; byte[] binaryBytes; if (value instanceof GenericData.Fixed) { binaryBytes = ((GenericData.Fixed)value).bytes(); } else if (value instanceof ByteBuffer) { final ByteBuffer byteBuffer = (ByteBuffer) value; binaryBytes = toBytes(byteBuffer); } else if (value instanceof byte[]) { binaryBytes = (byte[]) value; } else { throw new IllegalStateException(String.format( "Unrecognized type for Avro BINARY field value, which has type %s, value %s", value.getClass().getName(), value )); } binaryColVec.setRef(vectorPos, binaryBytes, 0, binaryBytes.length); break; case DECIMAL: DecimalColumnVector decimalColVec = (DecimalColumnVector) colVector; HiveDecimal decimalValue; if (value instanceof BigDecimal) { final BigDecimal decimal = (BigDecimal) value; decimalValue = HiveDecimal.create(decimal); } else if (value instanceof ByteBuffer) { final ByteBuffer byteBuffer = (ByteBuffer) value; final byte[] decimalBytes = new byte[byteBuffer.remaining()]; byteBuffer.get(decimalBytes); final BigInteger bigInt = new BigInteger(decimalBytes); final int scale = type.getScale(); BigDecimal bigDecVal = new BigDecimal(bigInt, scale); decimalValue = HiveDecimal.create(bigDecVal); if (decimalValue == null && decimalBytes.length > 0) { throw new IllegalStateException( "Unexpected read null HiveDecimal from bytes (base-64 encoded): " + Base64.getEncoder().encodeToString(decimalBytes) ); } } else if (value instanceof GenericData.Fixed) { final BigDecimal decimal = new Conversions.DecimalConversion() .fromFixed((GenericData.Fixed) value, avroSchema, logicalType); 
decimalValue = HiveDecimal.create(decimal); } else { throw new IllegalStateException(String.format( "Unexpected type for decimal (%s), cannot convert from Avro value", value.getClass().getCanonicalName() )); } if (decimalValue == null) { decimalColVec.isNull[vectorPos] = true; decimalColVec.noNulls = false; } else { decimalColVec.set(vectorPos, decimalValue); } break; case LIST: List<?> list = (List<?>) value; ListColumnVector listColVec = (ListColumnVector) colVector; listColVec.offsets[vectorPos] = listColVec.childCount; listColVec.lengths[vectorPos] = list.size(); TypeDescription listType = type.getChildren().get(0); for (Object listItem : list) { addToVector(listType, listColVec.child, avroSchema.getElementType(), listItem, listColVec.childCount++); } break; case MAP: Map<String, ?> mapValue = (Map<String, ?>) value; MapColumnVector mapColumnVector = (MapColumnVector) colVector; mapColumnVector.offsets[vectorPos] = mapColumnVector.childCount; mapColumnVector.lengths[vectorPos] = mapValue.size(); // keys are always strings Schema keySchema = Schema.create(Schema.Type.STRING); for (Map.Entry<String, ?> entry : mapValue.entrySet()) { addToVector( type.getChildren().get(0), mapColumnVector.keys, keySchema, entry.getKey(), mapColumnVector.childCount ); addToVector( type.getChildren().get(1), mapColumnVector.values, avroSchema.getValueType(), entry.getValue(), mapColumnVector.childCount ); mapColumnVector.childCount++; } break; case STRUCT: StructColumnVector structColVec = (StructColumnVector) colVector; GenericRecord record = (GenericRecord) value; for (int i = 0; i < type.getFieldNames().size(); i++) { String fieldName = type.getFieldNames().get(i); Object fieldValue = record.get(fieldName); TypeDescription fieldType = type.getChildren().get(i); addToVector(fieldType, structColVec.fields[i], avroSchema.getFields().get(i).schema(), fieldValue, vectorPos); } break; case UNION: UnionColumnVector unionColVec = (UnionColumnVector) colVector; List<TypeDescription> 
childTypes = type.getChildren(); boolean added = addUnionValue(unionColVec, childTypes, avroSchema, value, vectorPos); if (!added) { throw new IllegalStateException(String.format( "Failed to add value %s to union with type %s", value == null ? "null" : value.toString(), type )); } break; default: throw new IllegalArgumentException("Invalid TypeDescription " + type + "."); } } /** * Match value with its ORC type and add to the union vector at a given position. * * @param unionVector The vector to store value. * @param unionChildTypes All possible types for the value Object. * @param avroSchema Avro union schema for the value Object. * @param value Object to be added to the unionVector * @param vectorPos The position in the vector where value will be stored at. * @return succeeded or failed */ public static boolean addUnionValue( UnionColumnVector unionVector, List<TypeDescription> unionChildTypes, Schema avroSchema, Object value, int vectorPos ) { int matchIndex = -1; TypeDescription matchType = null; Object matchValue = null; for (int t = 0; t < unionChildTypes.size(); t++) { TypeDescription childType = unionChildTypes.get(t); boolean matches = false; switch (childType.getCategory()) { case BOOLEAN: matches = value instanceof Boolean; break; case BYTE: matches = value instanceof Byte; break; case SHORT: matches = value instanceof Short; break; case INT: matches = value instanceof Integer; break; case LONG: matches = value instanceof Long; break; case FLOAT: matches = value instanceof Float; break; case DOUBLE: matches = value instanceof Double; break; case STRING: case VARCHAR: case CHAR: if (value instanceof String) { matches = true; matchValue = getUTF8Bytes((String) value); } else if (value instanceof Utf8) { matches = true; matchValue = ((Utf8) value).getBytes(); } break; case DATE: matches = value instanceof Date; break; case TIMESTAMP: matches = value instanceof Timestamp; break; case BINARY: matches = value instanceof byte[] || value instanceof 
GenericData.Fixed; break; case DECIMAL: matches = value instanceof BigDecimal; break; case LIST: matches = value instanceof List; break; case MAP: matches = value instanceof Map; break; case STRUCT: throw new UnsupportedOperationException("Cannot handle STRUCT within UNION."); case UNION: List<TypeDescription> children = childType.getChildren(); if (value == null) { matches = children == null || children.size() == 0; } else { matches = addUnionValue(unionVector, children, avroSchema, value, vectorPos); } break; default: throw new IllegalArgumentException("Invalid TypeDescription " + childType.getCategory().toString() + "."); } if (matches) { matchIndex = t; matchType = childType; break; } } if (value == null && matchValue != null) { value = matchValue; } if (matchIndex >= 0) { unionVector.tags[vectorPos] = matchIndex; if (value == null) { unionVector.isNull[vectorPos] = true; unionVector.noNulls = false; } else { addToVector(matchType, unionVector.fields[matchIndex], avroSchema.getTypes().get(matchIndex), value, vectorPos); } return true; } else { return false; } } /** * Read the Column vector at a given position conforming to a given ORC schema. * * @param type ORC schema of the object to read. * @param colVector The column vector to read. * @param avroSchema Avro schema of the object to read. * Only used to check logical types for timestamp unit conversion. * @param vectorPos The position in the vector where the value to read is stored at. * @return The object being read. */ public static Object readFromVector(TypeDescription type, ColumnVector colVector, Schema avroSchema, int vectorPos) { if (colVector.isRepeating) { vectorPos = 0; } if (colVector.isNull[vectorPos]) { return null; } if (avroSchema.getType().equals(Schema.Type.UNION)) { avroSchema = getActualSchemaType(avroSchema); } LogicalType logicalType = avroSchema != null ? 
avroSchema.getLogicalType() : null; switch (type.getCategory()) { case BOOLEAN: return ((LongColumnVector) colVector).vector[vectorPos] != 0; case BYTE: return (byte) ((LongColumnVector) colVector).vector[vectorPos]; case SHORT: return (short) ((LongColumnVector) colVector).vector[vectorPos]; case INT: return (int) ((LongColumnVector) colVector).vector[vectorPos]; case LONG: return ((LongColumnVector) colVector).vector[vectorPos]; case FLOAT: return (float) ((DoubleColumnVector) colVector).vector[vectorPos]; case DOUBLE: return ((DoubleColumnVector) colVector).vector[vectorPos]; case VARCHAR: case CHAR: int maxLength = type.getMaxLength(); String result = ((BytesColumnVector) colVector).toString(vectorPos); if (result.length() <= maxLength) { return result; } else { throw new HoodieIOException("CHAR/VARCHAR has length " + result.length() + " greater than Max Length allowed"); } case STRING: String stringType = avroSchema.getProp(GenericData.STRING_PROP); Object parsedValue; if (stringType == null || !stringType.equals(StringType.String)) { int stringLength = ((BytesColumnVector) colVector).length[vectorPos]; int stringOffset = ((BytesColumnVector) colVector).start[vectorPos]; byte[] stringBytes = new byte[stringLength]; System.arraycopy(((BytesColumnVector) colVector).vector[vectorPos], stringOffset, stringBytes, 0, stringLength); parsedValue = new Utf8(stringBytes); } else { parsedValue = ((BytesColumnVector) colVector).toString(vectorPos); } if (avroSchema.getType() == Schema.Type.ENUM) { String enumValue = parsedValue.toString(); if (!enumValue.isEmpty()) { return new GenericData.EnumSymbol(avroSchema, enumValue); } } return parsedValue; case DATE: // convert to daysSinceEpoch for LogicalType.Date return (int) ((LongColumnVector) colVector).vector[vectorPos]; case TIMESTAMP: // The unit of time in ORC is millis. 
Convert (time,nanos) to the desired unit per logicalType long time = ((TimestampColumnVector) colVector).time[vectorPos]; int nanos = ((TimestampColumnVector) colVector).nanos[vectorPos]; if (logicalType instanceof LogicalTypes.TimestampMillis) { return time; } else if (logicalType instanceof LogicalTypes.TimestampMicros) { return time * MICROS_PER_MILLI + nanos / NANOS_PER_MICRO; } else { return ((TimestampColumnVector) colVector).getTimestampAsLong(vectorPos); } case BINARY: int binaryLength = ((BytesColumnVector) colVector).length[vectorPos]; int binaryOffset = ((BytesColumnVector) colVector).start[vectorPos]; byte[] binaryBytes = new byte[binaryLength]; System.arraycopy(((BytesColumnVector) colVector).vector[vectorPos], binaryOffset, binaryBytes, 0, binaryLength); // return a ByteBuffer to be consistent with AvroRecordConverter return ByteBuffer.wrap(binaryBytes); case DECIMAL: // HiveDecimal always ignores trailing zeros, thus modifies the scale implicitly, // therefore, the scale must be enforced here. 
BigDecimal bigDecimal = ((DecimalColumnVector) colVector).vector[vectorPos] .getHiveDecimal().bigDecimalValue() .setScale(((LogicalTypes.Decimal) logicalType).getScale()); Schema.Type baseType = avroSchema.getType(); if (baseType.equals(Schema.Type.FIXED)) { return new Conversions.DecimalConversion().toFixed(bigDecimal, avroSchema, logicalType); } else if (baseType.equals(Schema.Type.BYTES)) { return ByteBuffer.wrap(bigDecimal.unscaledValue().toByteArray()); } else { throw new HoodieIOException(baseType.getName() + "is not a valid type for LogicalTypes.DECIMAL."); } case LIST: ArrayList<Object> list = new ArrayList<>(); ListColumnVector listVector = (ListColumnVector) colVector; int listLength = (int) listVector.lengths[vectorPos]; int listOffset = (int) listVector.offsets[vectorPos]; list.ensureCapacity(listLength); TypeDescription childType = type.getChildren().get(0); for (int i = 0; i < listLength; i++) { list.add(readFromVector(childType, listVector.child, avroSchema.getElementType(), listOffset + i)); } return list; case MAP: Map<String, Object> map = new HashMap<String, Object>(); MapColumnVector mapVector = (MapColumnVector) colVector; int mapLength = (int) mapVector.lengths[vectorPos]; int mapOffset = (int) mapVector.offsets[vectorPos]; // keys are always strings for maps in Avro Schema keySchema = Schema.create(Schema.Type.STRING); for (int i = 0; i < mapLength; i++) { map.put( readFromVector(type.getChildren().get(0), mapVector.keys, keySchema, i + mapOffset).toString(), readFromVector(type.getChildren().get(1), mapVector.values, avroSchema.getValueType(), i + mapOffset)); } return map; case STRUCT: StructColumnVector structVector = (StructColumnVector) colVector; List<TypeDescription> children = type.getChildren(); GenericData.Record record = new GenericData.Record(avroSchema); for (int i = 0; i < children.size(); i++) { record.put(i, readFromVector(children.get(i), structVector.fields[i], avroSchema.getFields().get(i).schema(), vectorPos)); } return 
record; case UNION: UnionColumnVector unionVector = (UnionColumnVector) colVector; int tag = unionVector.tags[vectorPos]; ColumnVector fieldVector = unionVector.fields[tag]; return readFromVector(type.getChildren().get(tag), fieldVector, avroSchema.getTypes().get(tag), vectorPos); default: throw new HoodieIOException("Unrecognized TypeDescription " + type); } } public static TypeDescription createOrcSchema(Schema avroSchema) { LogicalType logicalType = avroSchema.getLogicalType(); if (logicalType != null) { if (logicalType instanceof LogicalTypes.Decimal) { return TypeDescription.createDecimal() .withPrecision(((LogicalTypes.Decimal) logicalType).getPrecision()) .withScale(((LogicalTypes.Decimal) logicalType).getScale()); } else if (logicalType instanceof LogicalTypes.Date) { // The date logical type represents a date within the calendar, with no reference to a particular time zone // or time of day. // // A date logical type annotates an Avro int, where the int stores the number of days from the unix epoch, 1 // January 1970 (ISO calendar). return TypeDescription.createDate(); } else if (logicalType instanceof LogicalTypes.TimeMillis) { // The time-millis logical type represents a time of day, with no reference to a particular calendar, time // zone or date, with a precision of one millisecond. // // A time-millis logical type annotates an Avro int, where the int stores the number of milliseconds after // midnight, 00:00:00.000. return TypeDescription.createInt(); } else if (logicalType instanceof LogicalTypes.TimeMicros) { // The time-micros logical type represents a time of day, with no reference to a particular calendar, time // zone or date, with a precision of one microsecond. // // A time-micros logical type annotates an Avro long, where the long stores the number of microseconds after // midnight, 00:00:00.000000. 
return TypeDescription.createLong(); } else if (logicalType instanceof LogicalTypes.TimestampMillis) { // The timestamp-millis logical type represents an instant on the global timeline, independent of a // particular time zone or calendar, with a precision of one millisecond. // // A timestamp-millis logical type annotates an Avro long, where the long stores the number of milliseconds // from the unix epoch, 1 January 1970 00:00:00.000 UTC. return TypeDescription.createTimestamp(); } else if (logicalType instanceof LogicalTypes.TimestampMicros) { // The timestamp-micros logical type represents an instant on the global timeline, independent of a // particular time zone or calendar, with a precision of one microsecond. // // A timestamp-micros logical type annotates an Avro long, where the long stores the number of microseconds // from the unix epoch, 1 January 1970 00:00:00.000000 UTC. return TypeDescription.createTimestamp(); } } final Schema.Type type = avroSchema.getType(); switch (type) { case NULL: // empty union represents null type final TypeDescription nullUnion = TypeDescription.createUnion(); return nullUnion; case LONG: return TypeDescription.createLong(); case INT: return TypeDescription.createInt(); case BYTES: return TypeDescription.createBinary(); case ARRAY: return TypeDescription.createList(createOrcSchema(avroSchema.getElementType())); case RECORD: final TypeDescription recordStruct = TypeDescription.createStruct(); for (Schema.Field field : avroSchema.getFields()) { final Schema fieldSchema = field.schema(); final TypeDescription fieldType = createOrcSchema(fieldSchema); if (fieldType != null) { recordStruct.addField(field.name(), fieldType); } } return recordStruct; case MAP: return TypeDescription.createMap( // in Avro maps, keys are always strings TypeDescription.createString(), createOrcSchema(avroSchema.getValueType()) ); case UNION: final List<Schema> nonNullMembers = avroSchema.getTypes().stream().filter( schema -> 
!Schema.Type.NULL.equals(schema.getType()) ).collect(Collectors.toList()); if (nonNullMembers.isEmpty()) { // no non-null union members; represent as an ORC empty union return TypeDescription.createUnion(); } else if (nonNullMembers.size() == 1) { // a single non-null union member // this is how Avro represents "nullable" types; as a union of the NULL type with another // since ORC already supports nullability of all types, just use the child type directly return createOrcSchema(nonNullMembers.get(0)); } else { // more than one non-null type; represent as an actual ORC union of them final TypeDescription union = TypeDescription.createUnion(); for (final Schema childSchema : nonNullMembers) { union.addUnionChild(createOrcSchema(childSchema)); } return union; } case STRING: return TypeDescription.createString(); case FLOAT: return TypeDescription.createFloat(); case DOUBLE: return TypeDescription.createDouble(); case BOOLEAN: return TypeDescription.createBoolean(); case ENUM: // represent as String for now return TypeDescription.createString(); case FIXED: return TypeDescription.createBinary(); default: throw new IllegalStateException(String.format("Unrecognized Avro type: %s", type.getName())); } } public static Schema createAvroSchema(TypeDescription orcSchema) { switch (orcSchema.getCategory()) { case BOOLEAN: return Schema.create(Schema.Type.BOOLEAN); case BYTE: // tinyint (8 bit), use int to hold it return Schema.create(Schema.Type.INT); case SHORT: // smallint (16 bit), use int to hold it return Schema.create(Schema.Type.INT); case INT: // the Avro logical type could be AvroTypeUtil.LOGICAL_TYPE_TIME_MILLIS, but there is no way to distinguish return Schema.create(Schema.Type.INT); case LONG: // the Avro logical type could be AvroTypeUtil.LOGICAL_TYPE_TIME_MICROS, but there is no way to distinguish return Schema.create(Schema.Type.LONG); case FLOAT: return Schema.create(Schema.Type.FLOAT); case DOUBLE: return Schema.create(Schema.Type.DOUBLE); case VARCHAR: case 
CHAR: case STRING: return Schema.create(Schema.Type.STRING); case DATE: Schema date = Schema.create(Schema.Type.INT); LogicalTypes.date().addToSchema(date); return date; case TIMESTAMP: // Cannot distinguish between TIMESTAMP_MILLIS and TIMESTAMP_MICROS // Assume TIMESTAMP_MILLIS because Timestamp in ORC is in millis Schema timestamp = Schema.create(Schema.Type.LONG); LogicalTypes.timestampMillis().addToSchema(timestamp); return timestamp; case BINARY: return Schema.create(Schema.Type.BYTES); case DECIMAL: Schema decimal = Schema.create(Schema.Type.BYTES); LogicalTypes.decimal(orcSchema.getPrecision(), orcSchema.getScale()).addToSchema(decimal); return decimal; case LIST: return Schema.createArray(createAvroSchema(orcSchema.getChildren().get(0))); case MAP: return Schema.createMap(createAvroSchema(orcSchema.getChildren().get(1))); case STRUCT: List<Field> childFields = new ArrayList<>(); for (int i = 0; i < orcSchema.getChildren().size(); i++) { TypeDescription childType = orcSchema.getChildren().get(i); String childName = orcSchema.getFieldNames().get(i); childFields.add(new Field(childName, createAvroSchema(childType), "", null)); } return Schema.createRecord(childFields); case UNION: return Schema.createUnion(orcSchema.getChildren().stream() .map(AvroOrcUtils::createAvroSchema) .collect(Collectors.toList())); default: throw new IllegalStateException(String.format("Unrecognized ORC type: %s", orcSchema.getCategory().getName())); } } /** * Returns the actual schema of a field. * * All types in ORC is nullable whereas Avro uses a union that contains the NULL type to imply * the nullability of an Avro type. To achieve consistency between the Avro and ORC schema, * non-NULL types are extracted from the union type. * @param unionSchema A schema of union type. * @return An Avro schema that is either NULL or a UNION without NULL fields. 
*/ private static Schema getActualSchemaType(Schema unionSchema) { final List<Schema> nonNullMembers = unionSchema.getTypes().stream().filter( schema -> !Schema.Type.NULL.equals(schema.getType()) ).collect(Collectors.toList()); if (nonNullMembers.isEmpty()) { return Schema.create(Schema.Type.NULL); } else if (nonNullMembers.size() == 1) { return nonNullMembers.get(0); } else { return Schema.createUnion(nonNullMembers); } } public static Schema createAvroSchemaWithDefaultValue(TypeDescription orcSchema, String recordName, String namespace, boolean nullable) { Schema avroSchema = createAvroSchemaWithNamespace(orcSchema,recordName,namespace); List<Schema.Field> fields = new ArrayList<Schema.Field>(); List<Field> fieldList = avroSchema.getFields(); for (Field field : fieldList) { Schema fieldSchema = field.schema(); Schema nullableSchema = Schema.createUnion(Schema.create(Schema.Type.NULL),fieldSchema); if (nullable) { fields.add(new Schema.Field(field.name(), nullableSchema, null, NULL_VALUE)); } else { fields.add(new Schema.Field(field.name(), fieldSchema, null, null)); } } Schema schema = Schema.createRecord(recordName, null, null, false); schema.setFields(fields); return schema; } private static Schema createAvroSchemaWithNamespace(TypeDescription orcSchema, String recordName, String namespace) { switch (orcSchema.getCategory()) { case BOOLEAN: return Schema.create(Schema.Type.BOOLEAN); case BYTE: // tinyint (8 bit), use int to hold it return Schema.create(Schema.Type.INT); case SHORT: // smallint (16 bit), use int to hold it return Schema.create(Schema.Type.INT); case INT: // the Avro logical type could be AvroTypeUtil.LOGICAL_TYPE_TIME_MILLIS, but there is no way to distinguish return Schema.create(Schema.Type.INT); case LONG: // the Avro logical type could be AvroTypeUtil.LOGICAL_TYPE_TIME_MICROS, but there is no way to distinguish return Schema.create(Schema.Type.LONG); case FLOAT: return Schema.create(Schema.Type.FLOAT); case DOUBLE: return 
Schema.create(Schema.Type.DOUBLE); case VARCHAR: case CHAR: case STRING: return Schema.create(Schema.Type.STRING); case DATE: Schema date = Schema.create(Schema.Type.INT); LogicalTypes.date().addToSchema(date); return date; case TIMESTAMP: Schema timestamp = Schema.create(Schema.Type.LONG); LogicalTypes.timestampMillis().addToSchema(timestamp); return timestamp; case BINARY: return Schema.create(Schema.Type.BYTES); case DECIMAL: Schema decimal = Schema.create(Schema.Type.BYTES); LogicalTypes.decimal(orcSchema.getPrecision(), orcSchema.getScale()).addToSchema(decimal); return decimal; case LIST: return Schema.createArray(createAvroSchemaWithNamespace(orcSchema.getChildren().get(0), recordName, "")); case MAP: return Schema.createMap(createAvroSchemaWithNamespace(orcSchema.getChildren().get(1), recordName, "")); case STRUCT: List<Field> childFields = new ArrayList<>(); for (int i = 0; i < orcSchema.getChildren().size(); i++) { TypeDescription childType = orcSchema.getChildren().get(i); String childName = orcSchema.getFieldNames().get(i); childFields.add(new Field(childName, createAvroSchemaWithNamespace(childType, childName, ""), null, null)); } return Schema.createRecord(recordName, null, namespace, false, childFields); default: throw new IllegalStateException(String.format("Unrecognized ORC type: %s", orcSchema.getCategory().getName())); } } }
apache/jclouds
37,380
apis/openstack-swift/src/main/java/org/jclouds/openstack/swift/v1/blobstore/RegionScopedSwiftBlobStore.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jclouds.openstack.swift.v1.blobstore; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.collect.Iterables.tryFind; import static com.google.common.collect.Lists.transform; import static org.jclouds.Constants.PROPERTY_USER_THREADS; import static org.jclouds.blobstore.options.ListContainerOptions.Builder.recursive; import static org.jclouds.location.predicates.LocationPredicates.idEquals; import static org.jclouds.openstack.swift.v1.options.PutOptions.Builder.metadata; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.PipedInputStream; import java.io.PipedOutputStream; import java.io.RandomAccessFile; import java.lang.reflect.Method; import java.nio.MappedByteBuffer; import java.nio.channels.FileChannel; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.LinkedBlockingQueue; import jakarta.annotation.Resource; 
import jakarta.inject.Inject; import jakarta.inject.Named; import org.jclouds.Constants; import org.jclouds.blobstore.BlobStore; import org.jclouds.blobstore.BlobStoreContext; import org.jclouds.blobstore.KeyNotFoundException; import org.jclouds.blobstore.domain.Blob; import org.jclouds.blobstore.domain.BlobAccess; import org.jclouds.blobstore.domain.BlobBuilder; import org.jclouds.blobstore.domain.BlobMetadata; import org.jclouds.blobstore.domain.ContainerAccess; import org.jclouds.blobstore.domain.MultipartPart; import org.jclouds.blobstore.domain.MultipartUpload; import org.jclouds.blobstore.domain.MutableBlobMetadata; import org.jclouds.blobstore.domain.PageSet; import org.jclouds.blobstore.domain.StorageMetadata; import org.jclouds.blobstore.domain.StorageType; import org.jclouds.blobstore.domain.internal.BlobBuilderImpl; import org.jclouds.blobstore.domain.internal.BlobImpl; import org.jclouds.blobstore.domain.internal.PageSetImpl; import org.jclouds.blobstore.functions.BlobToHttpGetOptions; import org.jclouds.blobstore.options.CopyOptions; import org.jclouds.blobstore.options.CreateContainerOptions; import org.jclouds.blobstore.options.GetOptions; import org.jclouds.blobstore.options.ListContainerOptions; import org.jclouds.blobstore.options.PutOptions; import org.jclouds.blobstore.strategy.ClearListStrategy; import org.jclouds.blobstore.strategy.internal.MultipartUploadSlicingAlgorithm; import org.jclouds.collect.Memoized; import org.jclouds.domain.Location; import org.jclouds.io.ContentMetadata; import org.jclouds.io.Payload; import org.jclouds.io.PayloadSlicer; import org.jclouds.io.payloads.ByteSourcePayload; import org.jclouds.logging.Logger; import org.jclouds.openstack.swift.v1.SwiftApi; import org.jclouds.openstack.swift.v1.blobstore.functions.ToBlobMetadata; import org.jclouds.openstack.swift.v1.blobstore.functions.ToListContainerOptions; import org.jclouds.openstack.swift.v1.blobstore.functions.ToResourceMetadata; import 
org.jclouds.openstack.swift.v1.domain.Container; import org.jclouds.openstack.swift.v1.domain.DeleteStaticLargeObjectResponse; import org.jclouds.openstack.swift.v1.domain.ObjectList; import org.jclouds.openstack.swift.v1.domain.Segment; import org.jclouds.openstack.swift.v1.domain.SwiftObject; import org.jclouds.openstack.swift.v1.features.BulkApi; import org.jclouds.openstack.swift.v1.features.ObjectApi; import org.jclouds.openstack.swift.v1.options.UpdateContainerOptions; import org.jclouds.openstack.swift.v1.reference.SwiftHeaders; import org.jclouds.util.Closeables2; import com.google.common.annotations.Beta; import com.google.common.base.Function; import com.google.common.base.Optional; import com.google.common.base.Supplier; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; import com.google.common.collect.FluentIterable; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap.Builder; import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Maps; import com.google.common.io.ByteSource; import com.google.common.io.ByteStreams; import com.google.common.io.Closeables; import com.google.common.net.HttpHeaders; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import com.google.inject.AbstractModule; import com.google.inject.Injector; import com.google.inject.assistedinject.Assisted; public class RegionScopedSwiftBlobStore implements BlobStore { @Inject protected RegionScopedSwiftBlobStore(Injector baseGraph, BlobStoreContext context, SwiftApi api, @Memoized Supplier<Set<? 
extends Location>> locations, @Assisted String regionId, PayloadSlicer slicer, @Named(PROPERTY_USER_THREADS) ListeningExecutorService userExecutor) { checkNotNull(regionId, "regionId"); Optional<? extends Location> found = tryFind(locations.get(), idEquals(regionId)); checkArgument(found.isPresent(), "region %s not in %s", regionId, locations.get()); this.region = found.get(); this.regionId = regionId; this.slicer = slicer; this.toResourceMetadata = new ToResourceMetadata(found.get()); this.context = context; this.api = api; this.userExecutor = userExecutor; // until we parameterize ClearListStrategy with a factory this.clearList = baseGraph.createChildInjector(new AbstractModule() { @Override protected void configure() { bind(BlobStore.class).toInstance(RegionScopedSwiftBlobStore.this); } }).getInstance(ClearListStrategy.class); } private final BlobStoreContext context; private final ClearListStrategy clearList; private final SwiftApi api; private final Location region; private final String regionId; private final BlobToHttpGetOptions toGetOptions = new BlobToHttpGetOptions(); private final ToListContainerOptions toListContainerOptions = new ToListContainerOptions(); private final ToResourceMetadata toResourceMetadata; protected final PayloadSlicer slicer; protected final ListeningExecutorService userExecutor; @Resource protected Logger logger = Logger.NULL; @Override public Set<? extends Location> listAssignableLocations() { return ImmutableSet.of(region); } @Override public PageSet<? extends StorageMetadata> list() { // TODO: there may eventually be >10k containers.. 
FluentIterable<StorageMetadata> containers = api.getContainerApi(regionId).list() .transform(toResourceMetadata); return new PageSetImpl<StorageMetadata>(containers, null); } @Override public boolean containerExists(String container) { Container val = api.getContainerApi(regionId).get(container); containerCache.put(container, Optional.fromNullable(val)); return val != null; } @Override public boolean createContainerInLocation(Location location, String container) { return createContainerInLocation(location, container, CreateContainerOptions.NONE); } @Override public boolean createContainerInLocation(Location location, String container, CreateContainerOptions options) { checkArgument(location == null || location.equals(region), "location must be null or %s", region); boolean containerCreated = api.getContainerApi(regionId).create(container, options.isPublicRead() ? ANYBODY_READ : BASIC_CONTAINER); if (containerCreated) { containerCache.put(container, Optional.fromNullable(api.getContainerApi(regionId).get(container))); } return containerCreated; } @Override public ContainerAccess getContainerAccess(String name) { Container container = api.getContainerApi(regionId).get(name); if (container.getAnybodyRead().get()) { return ContainerAccess.PUBLIC_READ; } else { return ContainerAccess.PRIVATE; } } @Override public void setContainerAccess(String name, ContainerAccess access) { UpdateContainerOptions options = new UpdateContainerOptions(); if (access == ContainerAccess.PUBLIC_READ) { options.anybodyRead(); } else { options.headers(ImmutableMultimap.of(SwiftHeaders.CONTAINER_READ, SwiftHeaders.CONTAINER_ACL_PRIVATE)); } api.getContainerApi(regionId).update(name, options); } private static final org.jclouds.openstack.swift.v1.options.CreateContainerOptions BASIC_CONTAINER = new org.jclouds.openstack.swift.v1.options.CreateContainerOptions(); private static final org.jclouds.openstack.swift.v1.options.CreateContainerOptions ANYBODY_READ = new 
org.jclouds.openstack.swift.v1.options.CreateContainerOptions() .anybodyRead(); @Override public PageSet<? extends StorageMetadata> list(String container) { return list(container, ListContainerOptions.NONE); } @Override public PageSet<? extends StorageMetadata> list(final String container, ListContainerOptions options) { ObjectApi objectApi = api.getObjectApi(regionId, container); ObjectList objects = objectApi.list(toListContainerOptions.apply(options)); if (objects == null) { containerCache.put(container, Optional.<Container> absent()); return new PageSetImpl<StorageMetadata>(ImmutableList.<StorageMetadata> of(), null); } else { containerCache.put(container, Optional.of(objects.getContainer())); List<? extends StorageMetadata> list = transform(objects, toBlobMetadata(container)); int limit = Optional.fromNullable(options.getMaxResults()).or(10000); String marker = null; if (!list.isEmpty() && list.size() == limit) { marker = list.get(limit - 1).getName(); } // TODO: we should probably deprecate this option if (options.isDetailed()) { list = transform(list, new Function<StorageMetadata, StorageMetadata>() { @Override public StorageMetadata apply(StorageMetadata input) { if (input.getType() != StorageType.BLOB) { return input; } return blobMetadata(container, input.getName()); } }); } return new PageSetImpl<StorageMetadata>(list, marker); } } @Override public boolean blobExists(String container, String name) { return blobMetadata(container, name) != null; } @Override public String putBlob(String container, Blob blob) { return putBlob(container, blob, PutOptions.NONE); } @Override public String putBlob(String container, Blob blob, PutOptions options) { if (options.getBlobAccess() != BlobAccess.PRIVATE) { throw new UnsupportedOperationException("blob access not supported by swift"); } if (options.isMultipart()) { return putMultipartBlob(container, blob, options); } ObjectApi objectApi = api.getObjectApi(regionId, container); return 
// NOTE(review): tail of the preceding put method — its signature lies above this chunk.
// Uploads the blob's payload under its metadata name, attaching the user metadata.
objectApi.put(blob.getMetadata().getName(), blob.getPayload(), metadata(blob.getMetadata().getUserMetadata()));
}

/**
 * Server-side copy of an object within Swift.
 *
 * If the caller supplied content or user metadata in {@code options}, those replace the
 * source's metadata on the destination; otherwise the source object's metadata is fetched
 * and carried over. ifNoneMatch is rejected because Swift's copy API has no such
 * precondition.
 *
 * @return the ETag of the newly written destination object
 * @throws KeyNotFoundException if the source object does not exist (only detected on the
 *         metadata-carry-over path; presumably the copy call itself fails otherwise —
 *         TODO confirm)
 */
@Override
public String copyBlob(String fromContainer, String fromName, String toContainer, String toName, CopyOptions options) {
   ObjectApi objectApi = api.getObjectApi(regionId, toContainer);
   org.jclouds.openstack.swift.v1.options.CopyOptions swiftOptions = new org.jclouds.openstack.swift.v1.options.CopyOptions();
   if (options.ifMatch() != null) {
      swiftOptions.ifMatch(options.ifMatch());
   }
   if (options.ifNoneMatch() != null) {
      throw new UnsupportedOperationException("Swift does not support ifNoneMatch");
   }
   if (options.ifModifiedSince() != null) {
      swiftOptions.ifModifiedSince(options.ifModifiedSince());
   }
   if (options.ifUnmodifiedSince() != null) {
      swiftOptions.ifUnmodifiedSince(options.ifUnmodifiedSince());
   }
   Map<String, String> systemMetadata = Maps.newHashMap();
   ContentMetadata contentMetadata = options.contentMetadata();
   Map<String, String> userMetadata = options.userMetadata();
   if (contentMetadata != null || userMetadata != null) {
      // Caller-supplied metadata wins: map each non-null content-* attribute to its header.
      if (contentMetadata != null) {
         String contentDisposition = contentMetadata.getContentDisposition();
         if (contentDisposition != null) {
            systemMetadata.put(HttpHeaders.CONTENT_DISPOSITION, contentDisposition);
         }
         String contentEncoding = contentMetadata.getContentEncoding();
         if (contentEncoding != null) {
            systemMetadata.put(HttpHeaders.CONTENT_ENCODING, contentEncoding);
         }
         String contentLanguage = contentMetadata.getContentLanguage();
         if (contentLanguage != null) {
            systemMetadata.put(HttpHeaders.CONTENT_LANGUAGE, contentLanguage);
         }
         String contentType = contentMetadata.getContentType();
         if (contentType != null) {
            systemMetadata.put(HttpHeaders.CONTENT_TYPE, contentType);
         }
      }
      if (userMetadata == null) {
         userMetadata = Maps.newHashMap();
      }
   } else {
      // No overrides: read the source object's metadata (HEAD) and propagate it.
      SwiftObject metadata = api.getObjectApi(regionId, fromContainer).getWithoutBody(fromName);
      if (metadata == null) {
         throw new KeyNotFoundException(fromContainer, fromName, "Swift could not find the specified source key");
      }
      contentMetadata = metadata.getPayload().getContentMetadata();
      String contentDisposition = contentMetadata.getContentDisposition();
      if (contentDisposition != null) {
         systemMetadata.put(HttpHeaders.CONTENT_DISPOSITION, contentDisposition);
      }
      String contentEncoding = contentMetadata.getContentEncoding();
      if (contentEncoding != null) {
         systemMetadata.put(HttpHeaders.CONTENT_ENCODING, contentEncoding);
      }
      String contentLanguage = contentMetadata.getContentLanguage();
      if (contentLanguage != null) {
         systemMetadata.put(HttpHeaders.CONTENT_LANGUAGE, contentLanguage);
      }
      String contentType = contentMetadata.getContentType();
      if (contentType != null) {
         systemMetadata.put(HttpHeaders.CONTENT_TYPE, contentType);
      }
      userMetadata = metadata.getMetadata();
   }
   objectApi.copy(toName, fromContainer, fromName, userMetadata, systemMetadata, swiftOptions);
   // TODO: Swift copy object *appends* user metadata, does not overwrite
   // Re-HEAD the destination to obtain the ETag of the copy.
   return objectApi.getWithoutBody(toName).getETag();
}

/** Returns the blob's metadata via a bodiless GET, or null if it does not exist. */
@Override
public BlobMetadata blobMetadata(String container, String name) {
   SwiftObject object = api.getObjectApi(regionId, container).getWithoutBody(name);
   if (object == null) {
      return null;
   }
   return toBlobMetadata(container).apply(object);
}

/** Fetches a blob with default (no-range, unconditional) options. */
@Override
public Blob getBlob(String container, String key) {
   return getBlob(container, key, GetOptions.NONE);
}

/**
 * Fetches a blob, translating jclouds GetOptions to Swift options.
 *
 * @return the blob (payload + headers + metadata), or null if the object is absent
 */
@Override
public Blob getBlob(String container, String name, GetOptions options) {
   ObjectApi objectApi = api.getObjectApi(regionId, container);
   SwiftObject object = objectApi.get(name, toGetOptions.apply(options));
   if (object == null) {
      return null;
   }
   Blob blob = new BlobImpl(toBlobMetadata(container).apply(object));
   blob.setPayload(object.getPayload());
   blob.setAllHeaders(object.getHeaders());
   return blob;
}

@Override
public void removeBlob(String container, String name) {
   // Multipart objects have a manifest which points to subobjects. Normally
   // deleting a object only deletes the manifest, leaving the subobjects.
   // We first try a multipart delete and if that fails since the object is
   // not an MPU we fall back to single-part delete.
   DeleteStaticLargeObjectResponse response = api.getStaticLargeObjectApi(regionId, container).delete(name);
   if (!response.status().equals("200 OK")) {
      api.getObjectApi(regionId, container).delete(name);
   }
}

/**
 * Delete multiple single-part objects. Note that this does not remove the
 * subobjects of a multi-part upload.
 */
@Override
public void removeBlobs(String container, Iterable<String> names) {
   BulkApi bulkApi = api.getBulkApi(regionId);
   // Swift bulk delete takes "container/object" paths; batch in groups of 1000.
   for (List<String> partition : Iterables.partition(names, 1000)) {
      ImmutableList.Builder<String> builder = ImmutableList.builder();
      for (String name : partition) {
         builder.add(container + "/" + name);
      }
      bulkApi.bulkDelete(builder.build());
   }
}

/** Swift objects have no per-blob ACL here; always reported as PRIVATE. */
@Override
public BlobAccess getBlobAccess(String container, String name) {
   return BlobAccess.PRIVATE;
}

/** Per-blob ACLs are not supported by this provider. */
@Override
public void setBlobAccess(String container, String name, BlobAccess access) {
   throw new UnsupportedOperationException("unsupported in swift");
}

@Override
public BlobStoreContext getContext() {
   return context;
}

@Override
public BlobBuilder blobBuilder(String name) {
   return new BlobBuilderImpl().name(name);
}

/** A "directory" exists iff an object with exactly that name exists. */
@Override
public boolean directoryExists(String containerName, String directory) {
   return api.getObjectApi(regionId, containerName)
         .get(directory) != null;
}

/** Creates a directory marker: a zero-byte object typed application/directory. */
@Override
public void createDirectory(String containerName, String directory) {
   api.getObjectApi(regionId, containerName)
         .put(directory, directoryPayload);
}

// Shared empty payload used as the directory marker body.
private final Payload directoryPayload = new ByteSourcePayload(ByteSource.wrap(new byte[] {})) {
   {
      getContentMetadata().setContentType("application/directory");
   }
};

@Override
public void deleteDirectory(String containerName, String directory) {
   api.getObjectApi(regionId, containerName).delete(directory);
}

/** Object count from the container's stats headers; 0 if the container is missing. */
@Override
public long countBlobs(String containerName) {
   Container container = api.getContainerApi(regionId).get(containerName);
   // undefined if container doesn't exist, so default to zero
   return container != null && container.getObjectCount() != null ? container.getObjectCount() : 0;
}

/**
 * Starts a multipart (SLO) upload. Swift has no server-side MPU session, so the
 * "upload id" is synthesized locally (see the private overload).
 *
 * @throws UnsupportedOperationException if a non-PRIVATE ACL was requested
 */
@Override
public MultipartUpload initiateMultipartUpload(String container, BlobMetadata blobMetadata, PutOptions options) {
   if (options.getBlobAccess() != BlobAccess.PRIVATE) {
      throw new UnsupportedOperationException("blob ACLs not supported in swift");
   }
   return initiateMultipartUpload(container, blobMetadata, 0, options);
}

// Builds the synthetic upload id "<name>/slo/<epoch-seconds>/<content-length>/<part-size>";
// the timestamp keeps concurrent uploads of the same name from colliding.
private MultipartUpload initiateMultipartUpload(String container, BlobMetadata blobMetadata, long partSize, PutOptions options) {
   Long contentLength = blobMetadata.getContentMetadata().getContentLength();
   String uploadId = String.format(Locale.ENGLISH, "%s/slo/%.6f/%s/%s", blobMetadata.getName(),
         System.currentTimeMillis() / 1000.0,
         contentLength == null ? Long.valueOf(0) : contentLength,
         partSize);
   return MultipartUpload.create(container, blobMetadata.getName(), uploadId, blobMetadata, options);
}

/** Aborts an MPU by bulk-deleting every part object already uploaded under its id. */
@Override
public void abortMultipartUpload(MultipartUpload mpu) {
   ImmutableList.Builder<String> names = ImmutableList.builder();
   for (MultipartPart part : listMultipartUpload(mpu)) {
      names.add(getMPUPartName(mpu, part.partNumber()));
   }
   removeBlobs(mpu.containerName(), names.build());
}

// Maps jclouds ContentMetadata onto the header map sent with the SLO manifest PUT.
private ImmutableMap<String, String> getContentMetadataForManifest(ContentMetadata contentMetadata) {
   Builder<String, String> mapBuilder = ImmutableMap.builder();
   if (contentMetadata.getContentType() != null) {
      mapBuilder.put("content-type", contentMetadata.getContentType());
   }
   /**
    * Do not set content-length. Set automatically to manifest json string
    * length by BindToJsonPayload
    */
   if (contentMetadata.getContentDisposition() != null) {
      mapBuilder.put("content-disposition", contentMetadata.getContentDisposition());
   }
   if (contentMetadata.getContentEncoding() != null) {
      mapBuilder.put("content-encoding", contentMetadata.getContentEncoding());
   }
   if (contentMetadata.getContentLanguage() != null) {
      mapBuilder.put("content-language", contentMetadata.getContentLanguage());
   }
   return mapBuilder.build();
}

// Part objects live under "<uploadId>/<zero-padded 8-digit part number>".
private String getMPUPartName(MultipartUpload mpu, int partNumber) {
   return String.format("%s/%08d", mpu.id(), partNumber);
}

/**
 * Completes the MPU by writing an SLO manifest referencing each part's path, etag,
 * and size.
 *
 * @return the ETag of the manifest object
 */
@Override
public String completeMultipartUpload(MultipartUpload mpu, List<MultipartPart> parts) {
   ImmutableList.Builder<Segment> builder = ImmutableList.builder();
   for (MultipartPart part : parts) {
      String path = mpu.containerName() + "/" + getMPUPartName(mpu, part.partNumber());
      builder.add(Segment.builder().path(path).etag(part.partETag()).sizeBytes(part.partSize()).build());
   }
   return api.getStaticLargeObjectApi(regionId, mpu.containerName()).replaceManifest(mpu.blobName(),
         builder.build(), mpu.blobMetadata().getUserMetadata(),
         getContentMetadataForManifest(mpu.blobMetadata().getContentMetadata()));
}

/** Uploads one part as an ordinary object named by getMPUPartName. */
@Override
public MultipartPart uploadMultipartPart(MultipartUpload mpu, int partNumber, Payload payload) {
   String partName = getMPUPartName(mpu, partNumber);
   String eTag = api.getObjectApi(regionId, mpu.containerName()).put(partName, payload);
   long partSize = payload.getContentMetadata().getContentLength();
   Date lastModified = null; // Swift does not return Last-Modified
   return MultipartPart.create(partNumber, partSize, eTag, lastModified);
}

/**
 * Lists the parts of an in-progress MPU by prefix-listing "<uploadId>/" and parsing
 * the part number off each name's final path segment.
 */
@Override
public List<MultipartPart> listMultipartUpload(MultipartUpload mpu) {
   ImmutableList.Builder<MultipartPart> parts = ImmutableList.builder();
   PageSet<? extends StorageMetadata> pageSet = list(mpu.containerName(),
         new ListContainerOptions().prefix(mpu.id() + "/"));
   // TODO: pagination
   for (StorageMetadata sm : pageSet) {
      int lastSlash = sm.getName().lastIndexOf('/');
      int partNumber = Integer.parseInt(sm.getName().substring(lastSlash + 1));
      parts.add(MultipartPart.create(partNumber, sm.getSize(), sm.getETag(), sm.getLastModified()));
   }
   return parts.build();
}

@Override
public List<MultipartUpload> listMultipartUploads(String container) {
   throw new UnsupportedOperationException();
}

// One byte above 1 MiB — NOTE(review): presumably chosen to clear Swift's minimum
// SLO segment size; confirm against the deployment's slo min_segment_size.
@Override
public long getMinimumMultipartPartSize() {
   return 1024 * 1024 + 1;
}

/** 5 GiB, Swift's default maximum single-object size. */
@Override
public long getMaximumMultipartPartSize() {
   return 5L * 1024L * 1024L * 1024L;
}

@Override
public int getMaximumNumberOfParts() {
   return Integer.MAX_VALUE;
}

@Override
public void clearContainer(String containerName) {
   clearContainer(containerName, recursive());
}

@Override
public void clearContainer(String containerName, ListContainerOptions options) {
   // this could be implemented to use bulk delete
   clearList.execute(containerName, options);
}

/** Clears the container, deletes it if then empty, and drops it from the cache. */
@Override
public void deleteContainer(String container) {
   clearContainer(container, recursive());
   api.getContainerApi(regionId).deleteIfEmpty(container);
   containerCache.invalidate(container);
}

@Override
public boolean deleteContainerIfEmpty(String container) {
   boolean deleted = api.getContainerApi(regionId).deleteIfEmpty(container);
   if (deleted) {
      containerCache.invalidate(container);
   }
   return deleted;
}

// Caches container lookups; Optional.absent() marks a container known to be missing.
// NOTE(review): no expiry is configured, so a container created/deleted out-of-band
// after a cached miss/hit will be reported stale until invalidated.
protected final LoadingCache<String, Optional<Container>> containerCache = CacheBuilder.newBuilder().build(
      new CacheLoader<String, Optional<Container>>() {
         public Optional<Container> load(String container) {
            return Optional.fromNullable(api.getContainerApi(regionId).get(container));
         }
      });

// Builds the SwiftObject -> MutableBlobMetadata converter for a container.
// NOTE(review): Optional.get() throws IllegalStateException if the container is absent.
protected Function<SwiftObject, MutableBlobMetadata> toBlobMetadata(String container) {
   return new ToBlobMetadata(containerCache.getUnchecked(container).get());
}

@Override
public long countBlobs(String containerName, ListContainerOptions options) {
   throw new UnsupportedOperationException();
}

// Per-range retry budget for the download/stream helpers; injectable via jclouds config.
@com.google.inject.Inject(optional = true)
@Named(Constants.PROPERTY_MAX_RETRIES)
protected int retryCountLimit = 5;

/**
 * Upload using a user-provided executor, or the jclouds userExecutor
 *
 * @param container
 * @param blob
 * @param overrides
 * @return the multipart blob etag
 */
@Beta
protected String putMultipartBlob(String container, Blob blob, PutOptions overrides) {
   if (overrides.getUseCustomExecutor()) {
      return putMultipartBlob(container, blob, overrides, overrides.getCustomExecutor());
   } else {
      return putMultipartBlob(container, blob, overrides, userExecutor);
   }
}

/**
 * Slices the blob payload into parts, uploads them concurrently on {@code executor},
 * and completes the upload with an SLO manifest.
 *
 * @return the manifest object's ETag
 * @throws NullPointerException if the blob has no content-length (required for slicing)
 */
@Beta
protected String putMultipartBlob(String container, Blob blob, PutOptions overrides, ListeningExecutorService executor) {
   ArrayList<ListenableFuture<MultipartPart>> parts = new ArrayList<ListenableFuture<MultipartPart>>();
   long contentLength = checkNotNull(blob.getMetadata().getContentMetadata().getContentLength(),
         "must provide content-length to use multi-part upload");
   MultipartUploadSlicingAlgorithm algorithm = new MultipartUploadSlicingAlgorithm(
         getMinimumMultipartPartSize(), getMaximumMultipartPartSize(), getMaximumNumberOfParts());
   long partSize = algorithm.calculateChunkSize(contentLength);
   MultipartUpload mpu = initiateMultipartUpload(container, blob.getMetadata(), partSize, overrides);
   int partNumber = 0;
   for (Payload payload : slicer.slice(blob.getPayload(), partSize)) {
      BlobUploader b = new BlobUploader(mpu, partNumber++, payload);
      parts.add(executor.submit(b));
   }
   // Blocks until every part finishes; any part failure propagates unchecked.
   return completeMultipartUpload(mpu, Futures.getUnchecked(Futures.allAsList(parts)));
}

// Task wrapper so one part upload can run per executor thread.
private final class BlobUploader implements Callable<MultipartPart> {
   private final MultipartUpload mpu;
   private final int partNumber;
   private final Payload payload;

   BlobUploader(MultipartUpload mpu, int partNumber, Payload payload) {
      this.mpu = mpu;
      this.partNumber = partNumber;
      this.payload = payload;
   }

   @Override
   public MultipartPart call() {
      return uploadMultipartPart(mpu, partNumber, payload);
   }
}

@Override
@Beta
public void downloadBlob(String container, String name, File destination) {
   downloadBlob(container, name, destination, userExecutor);
}

/**
 * Downloads an object to {@code destination} by fetching byte ranges in parallel
 * into a preallocated temp file, then atomically renaming it into place.
 *
 * NOTE(review): raf.seek(contentLength - 1) throws for a zero-length object — the
 * empty-blob case is not handled. TODO confirm whether callers can hit it.
 */
@Override
@Beta
public void downloadBlob(String container, String name, File destination, ExecutorService executor) {
   ListeningExecutorService listeningExecutor = MoreExecutors.listeningDecorator(executor);
   RandomAccessFile raf = null;
   File tempFile = new File(destination + "." + UUID.randomUUID());
   try {
      long contentLength = api
            .getObjectApi(regionId, container)
            .getWithoutBody(name)
            .getPayload()
            .getContentMetadata()
            .getContentLength();

      // Reserve space for performance reasons
      raf = new RandomAccessFile(tempFile, "rw");
      raf.seek(contentLength - 1);
      raf.write(0);

      // Determine download buffer size, smaller means less memory usage; larger is faster as long as threads are saturated
      long partSize = getMinimumMultipartPartSize();

      // Loop through ranges within the file
      long from;
      long to;
      List<ListenableFuture<Void>> results = new ArrayList<ListenableFuture<Void>>();

      for (from = 0; from < contentLength; from = from + partSize) {
         to = (from + partSize >= contentLength) ? contentLength - 1 : from + partSize - 1;
         BlobDownloader b = new BlobDownloader(regionId, container, name, raf, from, to);
         results.add(listeningExecutor.submit(b));
      }

      Futures.getUnchecked(Futures.allAsList(results));

      raf.getChannel().force(true);
      raf.getChannel().close();
      raf.close();

      if (destination.exists()) {
         destination.delete();
      }
      if (!tempFile.renameTo(destination)) {
         throw new RuntimeException("Could not move temporary downloaded file to destination " + destination);
      }
      tempFile = null;
   } catch (IOException e) {
      throw new RuntimeException(e);
   } finally {
      Closeables2.closeQuietly(raf);
      if (tempFile != null) {
         tempFile.delete();
      }
   }
}

// Downloads one byte range and memory-maps it into the shared RandomAccessFile.
private final class BlobDownloader implements Callable<Void> {
   String regionId;
   String containerName;
   String objectName;
   private final RandomAccessFile raf;
   private final long begin;
   private final long end;

   BlobDownloader(String regionId, String containerName, String objectName, RandomAccessFile raf, long begin, long end) {
      this.regionId = regionId;
      this.containerName = containerName;
      this.objectName = objectName;
      this.raf = raf;
      this.begin = begin;
      this.end = end;
   }

   @Override
   public Void call() {
      IOException lastException = null;
      for (int retry = 0; retry < retryCountLimit; retry++) {
         try {
            SwiftObject object = api.getObjectApi(regionId, containerName)
                  .get(objectName, org.jclouds.http.options.GetOptions.Builder.range(begin, end));
            // Download first, this is the part that usually fails
            byte[] targetArray;
            InputStream is = object.getPayload().openStream();
            try {
               targetArray = ByteStreams.toByteArray(is);
            } finally {
               Closeables.closeQuietly(is);
            }
            // Map file region
            MappedByteBuffer out = raf.getChannel().map(FileChannel.MapMode.READ_WRITE, begin, end - begin + 1);
            out.put(targetArray);
            out.force();
            // JDK-4715154 ; TODO: Java 8 FileChannels
            if (System.getProperty("os.name").toLowerCase().contains("windows")) {
               closeDirectBuffer(out);
            }
         } catch (IOException e) {
            lastException = e;
            continue;
         }
         // Success!
         return null;
      }
      throw new RuntimeException("After " + retryCountLimit + " retries: " + lastException);
   }

   // JDK-4715154: explicitly unmap so Windows can rename/delete the mapped file.
   // NOTE(review): relies on sun.misc.Cleaner, which is absent on Java 9+ — the
   // reflective lookup will fall into the catch and only log a warning there.
   private void closeDirectBuffer(MappedByteBuffer mbb) {
      if (mbb == null || !mbb.isDirect())
         return;
      try {
         Method cleaner = mbb.getClass().getMethod("cleaner");
         cleaner.setAccessible(true);
         Method clean = Class.forName("sun.misc.Cleaner").getMethod("clean");
         clean.setAccessible(true);
         clean.invoke(cleaner.invoke(mbb));
      } catch (Exception e) {
         logger.warn(e.toString());
      }
   }
}

@Beta
@Override
public InputStream streamBlob(final String container, final String name) {
   return streamBlob(container, name, userExecutor);
}

/**
 * Streams an object through a pipe: range downloads are fetched concurrently and
 * written to the pipe in order; the caller reads from the returned InputStream.
 */
@Beta
@Override
public InputStream streamBlob(final String container, final String name, final ExecutorService executor) {
   final ListeningExecutorService listeningExecutor = MoreExecutors.listeningDecorator(executor);

   // User will receive the Input end of the piped stream
   final PipedOutputStream output;
   final PipedInputStream input;
   try {
      output = new PipedOutputStream();
      // NOTE(review): the cast binds before the multiply — "(int) x * 5" is
      // ((int) x) * 5, which overflows silently if the part size ever exceeds
      // Integer.MAX_VALUE / 5. Harmless at the current ~1 MiB part size.
      input = new PipedInputStream(output,
            getMinimumMultipartPartSize() * 5 > Integer.MAX_VALUE ?
                  Integer.MAX_VALUE : (int) getMinimumMultipartPartSize() * 5);
   } catch (IOException e) {
      throw new RuntimeException(e);
   }

   // The total length of the file to download is needed to determine ranges
   // It has to be obtainable without downloading the whole file
   final long contentLength = api
         .getObjectApi(regionId, container)
         .getWithoutBody(name)
         .getPayload()
         .getContentMetadata()
         .getContentLength();

   // Determine download buffer size, smaller means less memory usage; larger is faster as long as threads are saturated
   final long partSize = getMinimumMultipartPartSize();

   // Used to communicate between the producer and consumer threads
   final LinkedBlockingQueue<ListenableFuture<byte[]>> results = new LinkedBlockingQueue<ListenableFuture<byte[]>>();

   // Consumer: takes futures in submission order and writes each chunk to the pipe.
   listeningExecutor.submit(new Runnable() {
      @Override
      public void run() {
         ListenableFuture<byte[]> result;
         long from;
         try {
            for (from = 0; from < contentLength; from = from + partSize) {
               logger.debug(Thread.currentThread() + " writing to output");
               result = results.take();
               // NOTE(review): BlockingQueue.take() never returns null, so this
               // branch is dead; a failed download surfaces via result.get() below.
               if (result == null) {
                  output.close();
                  input.close();
                  throw new RuntimeException("Error downloading file part to stream");
               }
               output.write(result.get());
            }
         } catch (Exception e) {
            logger.debug(e.toString());
            // Close pipe so client is notified of an exception
            Closeables2.closeQuietly(input);
            throw new RuntimeException(e);
         } finally {
            // Finished writing results to stream
            Closeables2.closeQuietly(output);
         }
      }
   });

   // Producer: submits one range download per part and queues the futures in order.
   listeningExecutor.submit(new Runnable() {
      @Override
      public void run() {
         long from;
         long to;
         // Loop through ranges within the file
         for (from = 0; from < contentLength; from = from + partSize) {
            to = (from + partSize >= contentLength) ? contentLength - 1 : from + partSize - 1;
            BlobStreamDownloader b = new BlobStreamDownloader(container, name, from, to);
            results.add(listeningExecutor.submit(b));
         }
      }
   });
   return input;
}

// Downloads one byte range into memory for the streaming pipe, with retries.
private final class BlobStreamDownloader implements Callable<byte[]> {
   String containerName;
   String objectName;
   private final long begin;
   private final long end;

   BlobStreamDownloader(String containerName, String objectName, long begin, long end) {
      this.containerName = containerName;
      this.objectName = objectName;
      this.begin = begin;
      this.end = end;
   }

   @Override
   public byte[] call() {
      IOException lastException = null;
      for (int retry = 0; retry < retryCountLimit; retry++) {
         try {
            long time = System.nanoTime(); // NOTE(review): unused; leftover timing probe
            SwiftObject object = api.getObjectApi(regionId, containerName)
                  .get(objectName, org.jclouds.http.options.GetOptions.Builder.range(begin, end));
            byte[] downloadedBlock;
            InputStream is = object.getPayload().openStream();
            try {
               downloadedBlock = ByteStreams.toByteArray(is);
            } finally {
               Closeables.closeQuietly(is);
            }
            return downloadedBlock;
         } catch (IOException e) {
            logger.debug(e.toString());
            lastException = e;
            continue;
         }
      }
      throw new RuntimeException("After " + retryCountLimit + " retries: " + lastException);
   }
}
}
googleapis/google-cloud-java
37,151
java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListRagFilesResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1beta1/vertex_rag_data_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1beta1; /** * * * <pre> * Response message for * [VertexRagDataService.ListRagFiles][google.cloud.aiplatform.v1beta1.VertexRagDataService.ListRagFiles]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.ListRagFilesResponse} */ public final class ListRagFilesResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.ListRagFilesResponse) ListRagFilesResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListRagFilesResponse.newBuilder() to construct. 
// NOTE(review): protoc-generated message code (see the "DO NOT EDIT" banner above).
// Changes belong in vertex_rag_data_service.proto followed by regeneration, not here.

// Internal constructor used by the generated Builder.
private ListRagFilesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// Default instance: no RagFiles, empty page token.
private ListRagFilesResponse() {
  ragFiles_ = java.util.Collections.emptyList();
  nextPageToken_ = "";
}

@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new ListRagFilesResponse();
}

public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.aiplatform.v1beta1.VertexRagDataServiceProto
      .internal_static_google_cloud_aiplatform_v1beta1_ListRagFilesResponse_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.aiplatform.v1beta1.VertexRagDataServiceProto
      .internal_static_google_cloud_aiplatform_v1beta1_ListRagFilesResponse_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse.class,
          com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse.Builder.class);
}

public static final int RAG_FILES_FIELD_NUMBER = 1;

@SuppressWarnings("serial")
private java.util.List<com.google.cloud.aiplatform.v1beta1.RagFile> ragFiles_;

/**
 * List of RagFiles in the requested page.
 *
 * <code>repeated .google.cloud.aiplatform.v1beta1.RagFile rag_files = 1;</code>
 */
@java.lang.Override
public java.util.List<com.google.cloud.aiplatform.v1beta1.RagFile> getRagFilesList() {
  return ragFiles_;
}

/**
 * List of RagFiles in the requested page.
 *
 * <code>repeated .google.cloud.aiplatform.v1beta1.RagFile rag_files = 1;</code>
 */
@java.lang.Override
public java.util.List<? extends com.google.cloud.aiplatform.v1beta1.RagFileOrBuilder>
    getRagFilesOrBuilderList() {
  return ragFiles_;
}

/**
 * List of RagFiles in the requested page.
 *
 * <code>repeated .google.cloud.aiplatform.v1beta1.RagFile rag_files = 1;</code>
 */
@java.lang.Override
public int getRagFilesCount() {
  return ragFiles_.size();
}

/**
 * List of RagFiles in the requested page.
 *
 * <code>repeated .google.cloud.aiplatform.v1beta1.RagFile rag_files = 1;</code>
 */
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.RagFile getRagFiles(int index) {
  return ragFiles_.get(index);
}

/**
 * List of RagFiles in the requested page.
 *
 * <code>repeated .google.cloud.aiplatform.v1beta1.RagFile rag_files = 1;</code>
 */
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.RagFileOrBuilder getRagFilesOrBuilder(int index) {
  return ragFiles_.get(index);
}

public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";

/**
 * A token to retrieve the next page of results.
 * Pass to
 * [ListRagFilesRequest.page_token][google.cloud.aiplatform.v1beta1.ListRagFilesRequest.page_token]
 * to obtain that page.
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
@java.lang.Override
public java.lang.String getNextPageToken() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Lazily decode the wire ByteString to String and cache the result.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    nextPageToken_ = s;
    return s;
  }
}

/**
 * A token to retrieve the next page of results.
 * Pass to
 * [ListRagFilesRequest.page_token][google.cloud.aiplatform.v1beta1.ListRagFilesRequest.page_token]
 * to obtain that page.
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    // Encode and cache the ByteString form on first request.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    nextPageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

// Memoized initialization state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;

  memoizedIsInitialized = 1;
  return true;
}

@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  for (int i = 0; i < ragFiles_.size(); i++) {
    output.writeMessage(1, ragFiles_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
  }
  getUnknownFields().writeTo(output);
}

@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  for (int i = 0; i < ragFiles_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, ragFiles_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}

@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse)) {
    return super.equals(obj);
  }
  com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse other =
      (com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse) obj;

  if (!getRagFilesList().equals(other.getRagFilesList())) return false;
  if (!getNextPageToken().equals(other.getNextPageToken())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (getRagFilesCount() > 0) {
    hash = (37 * hash) + RAG_FILES_FIELD_NUMBER;
    hash = (53 * hash) + getRagFilesList().hashCode();
  }
  hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getNextPageToken().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}

public static com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(
    com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for * [VertexRagDataService.ListRagFiles][google.cloud.aiplatform.v1beta1.VertexRagDataService.ListRagFiles]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.ListRagFilesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.ListRagFilesResponse) com.google.cloud.aiplatform.v1beta1.ListRagFilesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.VertexRagDataServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ListRagFilesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.VertexRagDataServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ListRagFilesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse.class, com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse.Builder.class); } // Construct using com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (ragFilesBuilder_ == null) { ragFiles_ = java.util.Collections.emptyList(); } else { ragFiles_ = null; ragFilesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; 
} @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1beta1.VertexRagDataServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ListRagFilesResponse_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse build() { com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse buildPartial() { com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse result = new com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse result) { if (ragFilesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { ragFiles_ = java.util.Collections.unmodifiableList(ragFiles_); bitField0_ = (bitField0_ & ~0x00000001); } result.ragFiles_ = ragFiles_; } else { result.ragFiles_ = ragFilesBuilder_.build(); } } private void buildPartial0(com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) 
{ return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse) { return mergeFrom((com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse other) { if (other == com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse.getDefaultInstance()) return this; if (ragFilesBuilder_ == null) { if (!other.ragFiles_.isEmpty()) { if (ragFiles_.isEmpty()) { ragFiles_ = other.ragFiles_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureRagFilesIsMutable(); ragFiles_.addAll(other.ragFiles_); } onChanged(); } } else { if (!other.ragFiles_.isEmpty()) { if (ragFilesBuilder_.isEmpty()) { ragFilesBuilder_.dispose(); ragFilesBuilder_ = null; ragFiles_ = other.ragFiles_; bitField0_ = (bitField0_ & ~0x00000001); ragFilesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getRagFilesFieldBuilder() : null; } else { ragFilesBuilder_.addAllMessages(other.ragFiles_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.aiplatform.v1beta1.RagFile m = input.readMessage( com.google.cloud.aiplatform.v1beta1.RagFile.parser(), extensionRegistry); if (ragFilesBuilder_ == null) { ensureRagFilesIsMutable(); ragFiles_.add(m); } else { ragFilesBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.aiplatform.v1beta1.RagFile> ragFiles_ = java.util.Collections.emptyList(); private void ensureRagFilesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { ragFiles_ = new java.util.ArrayList<com.google.cloud.aiplatform.v1beta1.RagFile>(ragFiles_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.RagFile, com.google.cloud.aiplatform.v1beta1.RagFile.Builder, 
com.google.cloud.aiplatform.v1beta1.RagFileOrBuilder> ragFilesBuilder_; /** * * * <pre> * List of RagFiles in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.RagFile rag_files = 1;</code> */ public java.util.List<com.google.cloud.aiplatform.v1beta1.RagFile> getRagFilesList() { if (ragFilesBuilder_ == null) { return java.util.Collections.unmodifiableList(ragFiles_); } else { return ragFilesBuilder_.getMessageList(); } } /** * * * <pre> * List of RagFiles in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.RagFile rag_files = 1;</code> */ public int getRagFilesCount() { if (ragFilesBuilder_ == null) { return ragFiles_.size(); } else { return ragFilesBuilder_.getCount(); } } /** * * * <pre> * List of RagFiles in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.RagFile rag_files = 1;</code> */ public com.google.cloud.aiplatform.v1beta1.RagFile getRagFiles(int index) { if (ragFilesBuilder_ == null) { return ragFiles_.get(index); } else { return ragFilesBuilder_.getMessage(index); } } /** * * * <pre> * List of RagFiles in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.RagFile rag_files = 1;</code> */ public Builder setRagFiles(int index, com.google.cloud.aiplatform.v1beta1.RagFile value) { if (ragFilesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRagFilesIsMutable(); ragFiles_.set(index, value); onChanged(); } else { ragFilesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * List of RagFiles in the requested page. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.RagFile rag_files = 1;</code> */ public Builder setRagFiles( int index, com.google.cloud.aiplatform.v1beta1.RagFile.Builder builderForValue) { if (ragFilesBuilder_ == null) { ensureRagFilesIsMutable(); ragFiles_.set(index, builderForValue.build()); onChanged(); } else { ragFilesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * List of RagFiles in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.RagFile rag_files = 1;</code> */ public Builder addRagFiles(com.google.cloud.aiplatform.v1beta1.RagFile value) { if (ragFilesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRagFilesIsMutable(); ragFiles_.add(value); onChanged(); } else { ragFilesBuilder_.addMessage(value); } return this; } /** * * * <pre> * List of RagFiles in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.RagFile rag_files = 1;</code> */ public Builder addRagFiles(int index, com.google.cloud.aiplatform.v1beta1.RagFile value) { if (ragFilesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRagFilesIsMutable(); ragFiles_.add(index, value); onChanged(); } else { ragFilesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * List of RagFiles in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.RagFile rag_files = 1;</code> */ public Builder addRagFiles( com.google.cloud.aiplatform.v1beta1.RagFile.Builder builderForValue) { if (ragFilesBuilder_ == null) { ensureRagFilesIsMutable(); ragFiles_.add(builderForValue.build()); onChanged(); } else { ragFilesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * List of RagFiles in the requested page. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.RagFile rag_files = 1;</code> */ public Builder addRagFiles( int index, com.google.cloud.aiplatform.v1beta1.RagFile.Builder builderForValue) { if (ragFilesBuilder_ == null) { ensureRagFilesIsMutable(); ragFiles_.add(index, builderForValue.build()); onChanged(); } else { ragFilesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * List of RagFiles in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.RagFile rag_files = 1;</code> */ public Builder addAllRagFiles( java.lang.Iterable<? extends com.google.cloud.aiplatform.v1beta1.RagFile> values) { if (ragFilesBuilder_ == null) { ensureRagFilesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, ragFiles_); onChanged(); } else { ragFilesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * List of RagFiles in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.RagFile rag_files = 1;</code> */ public Builder clearRagFiles() { if (ragFilesBuilder_ == null) { ragFiles_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { ragFilesBuilder_.clear(); } return this; } /** * * * <pre> * List of RagFiles in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.RagFile rag_files = 1;</code> */ public Builder removeRagFiles(int index) { if (ragFilesBuilder_ == null) { ensureRagFilesIsMutable(); ragFiles_.remove(index); onChanged(); } else { ragFilesBuilder_.remove(index); } return this; } /** * * * <pre> * List of RagFiles in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.RagFile rag_files = 1;</code> */ public com.google.cloud.aiplatform.v1beta1.RagFile.Builder getRagFilesBuilder(int index) { return getRagFilesFieldBuilder().getBuilder(index); } /** * * * <pre> * List of RagFiles in the requested page. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.RagFile rag_files = 1;</code> */ public com.google.cloud.aiplatform.v1beta1.RagFileOrBuilder getRagFilesOrBuilder(int index) { if (ragFilesBuilder_ == null) { return ragFiles_.get(index); } else { return ragFilesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * List of RagFiles in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.RagFile rag_files = 1;</code> */ public java.util.List<? extends com.google.cloud.aiplatform.v1beta1.RagFileOrBuilder> getRagFilesOrBuilderList() { if (ragFilesBuilder_ != null) { return ragFilesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(ragFiles_); } } /** * * * <pre> * List of RagFiles in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.RagFile rag_files = 1;</code> */ public com.google.cloud.aiplatform.v1beta1.RagFile.Builder addRagFilesBuilder() { return getRagFilesFieldBuilder() .addBuilder(com.google.cloud.aiplatform.v1beta1.RagFile.getDefaultInstance()); } /** * * * <pre> * List of RagFiles in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.RagFile rag_files = 1;</code> */ public com.google.cloud.aiplatform.v1beta1.RagFile.Builder addRagFilesBuilder(int index) { return getRagFilesFieldBuilder() .addBuilder(index, com.google.cloud.aiplatform.v1beta1.RagFile.getDefaultInstance()); } /** * * * <pre> * List of RagFiles in the requested page. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.RagFile rag_files = 1;</code> */ public java.util.List<com.google.cloud.aiplatform.v1beta1.RagFile.Builder> getRagFilesBuilderList() { return getRagFilesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.RagFile, com.google.cloud.aiplatform.v1beta1.RagFile.Builder, com.google.cloud.aiplatform.v1beta1.RagFileOrBuilder> getRagFilesFieldBuilder() { if (ragFilesBuilder_ == null) { ragFilesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.RagFile, com.google.cloud.aiplatform.v1beta1.RagFile.Builder, com.google.cloud.aiplatform.v1beta1.RagFileOrBuilder>( ragFiles_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); ragFiles_ = null; } return ragFilesBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to retrieve the next page of results. * Pass to * [ListRagFilesRequest.page_token][google.cloud.aiplatform.v1beta1.ListRagFilesRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token to retrieve the next page of results. * Pass to * [ListRagFilesRequest.page_token][google.cloud.aiplatform.v1beta1.ListRagFilesRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token to retrieve the next page of results. * Pass to * [ListRagFilesRequest.page_token][google.cloud.aiplatform.v1beta1.ListRagFilesRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token to retrieve the next page of results. * Pass to * [ListRagFilesRequest.page_token][google.cloud.aiplatform.v1beta1.ListRagFilesRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token to retrieve the next page of results. * Pass to * [ListRagFilesRequest.page_token][google.cloud.aiplatform.v1beta1.ListRagFilesRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.ListRagFilesResponse) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.ListRagFilesResponse) private static final com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse(); } public static com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListRagFilesResponse> PARSER = new com.google.protobuf.AbstractParser<ListRagFilesResponse>() { @java.lang.Override public ListRagFilesResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; 
public static com.google.protobuf.Parser<ListRagFilesResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListRagFilesResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ListRagFilesResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,220
java-talent/proto-google-cloud-talent-v4/src/main/java/com/google/cloud/talent/v4/UpdateCompanyRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/talent/v4/company_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.talent.v4; /** * * * <pre> * Request for updating a specified company. * </pre> * * Protobuf type {@code google.cloud.talent.v4.UpdateCompanyRequest} */ public final class UpdateCompanyRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.talent.v4.UpdateCompanyRequest) UpdateCompanyRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateCompanyRequest.newBuilder() to construct. 
private UpdateCompanyRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateCompanyRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateCompanyRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.talent.v4.CompanyServiceProto .internal_static_google_cloud_talent_v4_UpdateCompanyRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.talent.v4.CompanyServiceProto .internal_static_google_cloud_talent_v4_UpdateCompanyRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.talent.v4.UpdateCompanyRequest.class, com.google.cloud.talent.v4.UpdateCompanyRequest.Builder.class); } private int bitField0_; public static final int COMPANY_FIELD_NUMBER = 1; private com.google.cloud.talent.v4.Company company_; /** * * * <pre> * Required. The company resource to replace the current resource in the * system. * </pre> * * <code>.google.cloud.talent.v4.Company company = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the company field is set. */ @java.lang.Override public boolean hasCompany() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The company resource to replace the current resource in the * system. * </pre> * * <code>.google.cloud.talent.v4.Company company = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The company. */ @java.lang.Override public com.google.cloud.talent.v4.Company getCompany() { return company_ == null ? com.google.cloud.talent.v4.Company.getDefaultInstance() : company_; } /** * * * <pre> * Required. The company resource to replace the current resource in the * system. 
* </pre> * * <code>.google.cloud.talent.v4.Company company = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.talent.v4.CompanyOrBuilder getCompanyOrBuilder() { return company_ == null ? com.google.cloud.talent.v4.Company.getDefaultInstance() : company_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Strongly recommended for the best service experience. * * If [update_mask][google.cloud.talent.v4.UpdateCompanyRequest.update_mask] * is provided, only the specified fields in * [company][google.cloud.talent.v4.UpdateCompanyRequest.company] are updated. * Otherwise all the fields are updated. * * A field mask to specify the company fields to be updated. Only * top level fields of [Company][google.cloud.talent.v4.Company] are * supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Strongly recommended for the best service experience. * * If [update_mask][google.cloud.talent.v4.UpdateCompanyRequest.update_mask] * is provided, only the specified fields in * [company][google.cloud.talent.v4.UpdateCompanyRequest.company] are updated. * Otherwise all the fields are updated. * * A field mask to specify the company fields to be updated. Only * top level fields of [Company][google.cloud.talent.v4.Company] are * supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Strongly recommended for the best service experience. 
* * If [update_mask][google.cloud.talent.v4.UpdateCompanyRequest.update_mask] * is provided, only the specified fields in * [company][google.cloud.talent.v4.UpdateCompanyRequest.company] are updated. * Otherwise all the fields are updated. * * A field mask to specify the company fields to be updated. Only * top level fields of [Company][google.cloud.talent.v4.Company] are * supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getCompany()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getCompany()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.talent.v4.UpdateCompanyRequest)) { return super.equals(obj); } com.google.cloud.talent.v4.UpdateCompanyRequest other = (com.google.cloud.talent.v4.UpdateCompanyRequest) obj; 
if (hasCompany() != other.hasCompany()) return false; if (hasCompany()) { if (!getCompany().equals(other.getCompany())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasCompany()) { hash = (37 * hash) + COMPANY_FIELD_NUMBER; hash = (53 * hash) + getCompany().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.talent.v4.UpdateCompanyRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.talent.v4.UpdateCompanyRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.talent.v4.UpdateCompanyRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.talent.v4.UpdateCompanyRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.talent.v4.UpdateCompanyRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public 
static com.google.cloud.talent.v4.UpdateCompanyRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.talent.v4.UpdateCompanyRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.talent.v4.UpdateCompanyRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.talent.v4.UpdateCompanyRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.talent.v4.UpdateCompanyRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.talent.v4.UpdateCompanyRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.talent.v4.UpdateCompanyRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return 
DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.talent.v4.UpdateCompanyRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request for updating a specified company. * </pre> * * Protobuf type {@code google.cloud.talent.v4.UpdateCompanyRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.talent.v4.UpdateCompanyRequest) com.google.cloud.talent.v4.UpdateCompanyRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.talent.v4.CompanyServiceProto .internal_static_google_cloud_talent_v4_UpdateCompanyRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.talent.v4.CompanyServiceProto .internal_static_google_cloud_talent_v4_UpdateCompanyRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.talent.v4.UpdateCompanyRequest.class, com.google.cloud.talent.v4.UpdateCompanyRequest.Builder.class); } // Construct using com.google.cloud.talent.v4.UpdateCompanyRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getCompanyFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder 
clear() { super.clear(); bitField0_ = 0; company_ = null; if (companyBuilder_ != null) { companyBuilder_.dispose(); companyBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.talent.v4.CompanyServiceProto .internal_static_google_cloud_talent_v4_UpdateCompanyRequest_descriptor; } @java.lang.Override public com.google.cloud.talent.v4.UpdateCompanyRequest getDefaultInstanceForType() { return com.google.cloud.talent.v4.UpdateCompanyRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.talent.v4.UpdateCompanyRequest build() { com.google.cloud.talent.v4.UpdateCompanyRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.talent.v4.UpdateCompanyRequest buildPartial() { com.google.cloud.talent.v4.UpdateCompanyRequest result = new com.google.cloud.talent.v4.UpdateCompanyRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.talent.v4.UpdateCompanyRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.company_ = companyBuilder_ == null ? company_ : companyBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.talent.v4.UpdateCompanyRequest) { return mergeFrom((com.google.cloud.talent.v4.UpdateCompanyRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.talent.v4.UpdateCompanyRequest other) { if (other == com.google.cloud.talent.v4.UpdateCompanyRequest.getDefaultInstance()) return this; if (other.hasCompany()) { mergeCompany(other.getCompany()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); 
} try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getCompanyFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.talent.v4.Company company_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.talent.v4.Company, com.google.cloud.talent.v4.Company.Builder, com.google.cloud.talent.v4.CompanyOrBuilder> companyBuilder_; /** * * * <pre> * Required. The company resource to replace the current resource in the * system. * </pre> * * <code>.google.cloud.talent.v4.Company company = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the company field is set. */ public boolean hasCompany() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The company resource to replace the current resource in the * system. * </pre> * * <code>.google.cloud.talent.v4.Company company = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The company. */ public com.google.cloud.talent.v4.Company getCompany() { if (companyBuilder_ == null) { return company_ == null ? com.google.cloud.talent.v4.Company.getDefaultInstance() : company_; } else { return companyBuilder_.getMessage(); } } /** * * * <pre> * Required. The company resource to replace the current resource in the * system. 
* </pre> * * <code>.google.cloud.talent.v4.Company company = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setCompany(com.google.cloud.talent.v4.Company value) { if (companyBuilder_ == null) { if (value == null) { throw new NullPointerException(); } company_ = value; } else { companyBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The company resource to replace the current resource in the * system. * </pre> * * <code>.google.cloud.talent.v4.Company company = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setCompany(com.google.cloud.talent.v4.Company.Builder builderForValue) { if (companyBuilder_ == null) { company_ = builderForValue.build(); } else { companyBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The company resource to replace the current resource in the * system. * </pre> * * <code>.google.cloud.talent.v4.Company company = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeCompany(com.google.cloud.talent.v4.Company value) { if (companyBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && company_ != null && company_ != com.google.cloud.talent.v4.Company.getDefaultInstance()) { getCompanyBuilder().mergeFrom(value); } else { company_ = value; } } else { companyBuilder_.mergeFrom(value); } if (company_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The company resource to replace the current resource in the * system. * </pre> * * <code>.google.cloud.talent.v4.Company company = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearCompany() { bitField0_ = (bitField0_ & ~0x00000001); company_ = null; if (companyBuilder_ != null) { companyBuilder_.dispose(); companyBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. 
The company resource to replace the current resource in the * system. * </pre> * * <code>.google.cloud.talent.v4.Company company = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.talent.v4.Company.Builder getCompanyBuilder() { bitField0_ |= 0x00000001; onChanged(); return getCompanyFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The company resource to replace the current resource in the * system. * </pre> * * <code>.google.cloud.talent.v4.Company company = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.talent.v4.CompanyOrBuilder getCompanyOrBuilder() { if (companyBuilder_ != null) { return companyBuilder_.getMessageOrBuilder(); } else { return company_ == null ? com.google.cloud.talent.v4.Company.getDefaultInstance() : company_; } } /** * * * <pre> * Required. The company resource to replace the current resource in the * system. * </pre> * * <code>.google.cloud.talent.v4.Company company = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.talent.v4.Company, com.google.cloud.talent.v4.Company.Builder, com.google.cloud.talent.v4.CompanyOrBuilder> getCompanyFieldBuilder() { if (companyBuilder_ == null) { companyBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.talent.v4.Company, com.google.cloud.talent.v4.Company.Builder, com.google.cloud.talent.v4.CompanyOrBuilder>( getCompany(), getParentForChildren(), isClean()); company_ = null; } return companyBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Strongly recommended for the best service experience. 
* * If [update_mask][google.cloud.talent.v4.UpdateCompanyRequest.update_mask] * is provided, only the specified fields in * [company][google.cloud.talent.v4.UpdateCompanyRequest.company] are updated. * Otherwise all the fields are updated. * * A field mask to specify the company fields to be updated. Only * top level fields of [Company][google.cloud.talent.v4.Company] are * supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Strongly recommended for the best service experience. * * If [update_mask][google.cloud.talent.v4.UpdateCompanyRequest.update_mask] * is provided, only the specified fields in * [company][google.cloud.talent.v4.UpdateCompanyRequest.company] are updated. * Otherwise all the fields are updated. * * A field mask to specify the company fields to be updated. Only * top level fields of [Company][google.cloud.talent.v4.Company] are * supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Strongly recommended for the best service experience. * * If [update_mask][google.cloud.talent.v4.UpdateCompanyRequest.update_mask] * is provided, only the specified fields in * [company][google.cloud.talent.v4.UpdateCompanyRequest.company] are updated. * Otherwise all the fields are updated. * * A field mask to specify the company fields to be updated. Only * top level fields of [Company][google.cloud.talent.v4.Company] are * supported. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Strongly recommended for the best service experience. * * If [update_mask][google.cloud.talent.v4.UpdateCompanyRequest.update_mask] * is provided, only the specified fields in * [company][google.cloud.talent.v4.UpdateCompanyRequest.company] are updated. * Otherwise all the fields are updated. * * A field mask to specify the company fields to be updated. Only * top level fields of [Company][google.cloud.talent.v4.Company] are * supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Strongly recommended for the best service experience. * * If [update_mask][google.cloud.talent.v4.UpdateCompanyRequest.update_mask] * is provided, only the specified fields in * [company][google.cloud.talent.v4.UpdateCompanyRequest.company] are updated. * Otherwise all the fields are updated. * * A field mask to specify the company fields to be updated. Only * top level fields of [Company][google.cloud.talent.v4.Company] are * supported. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Strongly recommended for the best service experience. * * If [update_mask][google.cloud.talent.v4.UpdateCompanyRequest.update_mask] * is provided, only the specified fields in * [company][google.cloud.talent.v4.UpdateCompanyRequest.company] are updated. * Otherwise all the fields are updated. * * A field mask to specify the company fields to be updated. Only * top level fields of [Company][google.cloud.talent.v4.Company] are * supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Strongly recommended for the best service experience. * * If [update_mask][google.cloud.talent.v4.UpdateCompanyRequest.update_mask] * is provided, only the specified fields in * [company][google.cloud.talent.v4.UpdateCompanyRequest.company] are updated. * Otherwise all the fields are updated. * * A field mask to specify the company fields to be updated. Only * top level fields of [Company][google.cloud.talent.v4.Company] are * supported. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Strongly recommended for the best service experience. * * If [update_mask][google.cloud.talent.v4.UpdateCompanyRequest.update_mask] * is provided, only the specified fields in * [company][google.cloud.talent.v4.UpdateCompanyRequest.company] are updated. * Otherwise all the fields are updated. * * A field mask to specify the company fields to be updated. Only * top level fields of [Company][google.cloud.talent.v4.Company] are * supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Strongly recommended for the best service experience. * * If [update_mask][google.cloud.talent.v4.UpdateCompanyRequest.update_mask] * is provided, only the specified fields in * [company][google.cloud.talent.v4.UpdateCompanyRequest.company] are updated. * Otherwise all the fields are updated. * * A field mask to specify the company fields to be updated. Only * top level fields of [Company][google.cloud.talent.v4.Company] are * supported. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.talent.v4.UpdateCompanyRequest) } // @@protoc_insertion_point(class_scope:google.cloud.talent.v4.UpdateCompanyRequest) private static final com.google.cloud.talent.v4.UpdateCompanyRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.talent.v4.UpdateCompanyRequest(); } public static com.google.cloud.talent.v4.UpdateCompanyRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateCompanyRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateCompanyRequest>() { @java.lang.Override public UpdateCompanyRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch 
(com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateCompanyRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateCompanyRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.talent.v4.UpdateCompanyRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,224
java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/PairwiseSummarizationQualityInput.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1beta1/evaluation_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1beta1; /** * * * <pre> * Input for pairwise summarization quality metric. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput} */ public final class PairwiseSummarizationQualityInput extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput) PairwiseSummarizationQualityInputOrBuilder { private static final long serialVersionUID = 0L; // Use PairwiseSummarizationQualityInput.newBuilder() to construct. 
private PairwiseSummarizationQualityInput( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private PairwiseSummarizationQualityInput() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new PairwiseSummarizationQualityInput(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto .internal_static_google_cloud_aiplatform_v1beta1_PairwiseSummarizationQualityInput_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto .internal_static_google_cloud_aiplatform_v1beta1_PairwiseSummarizationQualityInput_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput.class, com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput.Builder.class); } private int bitField0_; public static final int METRIC_SPEC_FIELD_NUMBER = 1; private com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualitySpec metricSpec_; /** * * * <pre> * Required. Spec for pairwise summarization quality score metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the metricSpec field is set. */ @java.lang.Override public boolean hasMetricSpec() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. Spec for pairwise summarization quality score metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The metricSpec. 
*/ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualitySpec getMetricSpec() { return metricSpec_ == null ? com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualitySpec.getDefaultInstance() : metricSpec_; } /** * * * <pre> * Required. Spec for pairwise summarization quality score metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualitySpecOrBuilder getMetricSpecOrBuilder() { return metricSpec_ == null ? com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualitySpec.getDefaultInstance() : metricSpec_; } public static final int INSTANCE_FIELD_NUMBER = 2; private com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInstance instance_; /** * * * <pre> * Required. Pairwise summarization quality instance. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the instance field is set. */ @java.lang.Override public boolean hasInstance() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. Pairwise summarization quality instance. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The instance. */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInstance getInstance() { return instance_ == null ? com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInstance .getDefaultInstance() : instance_; } /** * * * <pre> * Required. Pairwise summarization quality instance. 
* </pre> * * <code> * .google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInstanceOrBuilder getInstanceOrBuilder() { return instance_ == null ? com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInstance .getDefaultInstance() : instance_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getMetricSpec()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getInstance()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getMetricSpec()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getInstance()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput)) { return super.equals(obj); } com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput other = (com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput) obj; if (hasMetricSpec() != other.hasMetricSpec()) return false; if (hasMetricSpec()) { if (!getMetricSpec().equals(other.getMetricSpec())) return false; } 
if (hasInstance() != other.hasInstance()) return false; if (hasInstance()) { if (!getInstance().equals(other.getInstance())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasMetricSpec()) { hash = (37 * hash) + METRIC_SPEC_FIELD_NUMBER; hash = (53 * hash) + getMetricSpec().hashCode(); } if (hasInstance()) { hash = (37 * hash) + INSTANCE_FIELD_NUMBER; hash = (53 * hash) + getInstance().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } 
// Builder plumbing, the nested Builder class, and class-scope statics that
// close out PairwiseSummarizationQualityInput. Generated code — do not edit.
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(
    com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // For the default instance a fresh Builder is equivalent and cheaper than
  // copying empty state via mergeFrom.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}

/**
 * Mutable builder for {@code google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput}
 * (input for the pairwise summarization quality metric). Two singular message
 * fields, tracked via bitField0_: metric_spec (bit 0x1) and instance (bit 0x2).
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput)
    com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInputOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
        .internal_static_google_cloud_aiplatform_v1beta1_PairwiseSummarizationQualityInput_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
        .internal_static_google_cloud_aiplatform_v1beta1_PairwiseSummarizationQualityInput_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput.class,
            com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput.Builder.class);
  }

  // Construct using
  // com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }

  // Eagerly creates sub-field builders when the runtime requires it
  // (alwaysUseFieldBuilders), so nested-builder change propagation works.
  private void maybeForceBuilderInitialization() {
    if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
      getMetricSpecFieldBuilder();
      getInstanceFieldBuilder();
    }
  }

  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    metricSpec_ = null;
    if (metricSpecBuilder_ != null) {
      metricSpecBuilder_.dispose();
      metricSpecBuilder_ = null;
    }
    instance_ = null;
    if (instanceBuilder_ != null) {
      instanceBuilder_.dispose();
      instanceBuilder_ = null;
    }
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
        .internal_static_google_cloud_aiplatform_v1beta1_PairwiseSummarizationQualityInput_descriptor;
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput
      getDefaultInstanceForType() {
    return com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput
        .getDefaultInstance();
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput build() {
    com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput buildPartial() {
    com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput result =
        new com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput(this);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }

  // Copies each set field (per bitField0_) into the result, preferring the
  // nested builder's built message when one exists.
  private void buildPartial0(
      com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput result) {
    int from_bitField0_ = bitField0_;
    int to_bitField0_ = 0;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      result.metricSpec_ = metricSpecBuilder_ == null ? metricSpec_ : metricSpecBuilder_.build();
      to_bitField0_ |= 0x00000001;
    }
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.instance_ = instanceBuilder_ == null ? instance_ : instanceBuilder_.build();
      to_bitField0_ |= 0x00000002;
    }
    result.bitField0_ |= to_bitField0_;
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  // Dynamic mergeFrom: dispatches to the typed overload when possible.
  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput) {
      return mergeFrom(
          (com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  public Builder mergeFrom(
      com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput other) {
    if (other
        == com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput
            .getDefaultInstance()) return this;
    if (other.hasMetricSpec()) {
      mergeMetricSpec(other.getMetricSpec());
    }
    if (other.hasInstance()) {
      mergeInstance(other.getInstance());
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  // Wire-format merge: tag 10 = metric_spec (field 1), tag 18 = instance
  // (field 2); anything else is preserved as an unknown field.
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              input.readMessage(getMetricSpecFieldBuilder().getBuilder(), extensionRegistry);
              bitField0_ |= 0x00000001;
              break;
            } // case 10
          case 18:
            {
              input.readMessage(getInstanceFieldBuilder().getBuilder(), extensionRegistry);
              bitField0_ |= 0x00000002;
              break;
            } // case 18
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }

  private int bitField0_;

  private com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualitySpec metricSpec_;
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualitySpec,
          com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualitySpec.Builder,
          com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualitySpecOrBuilder>
      metricSpecBuilder_;

  /**
   * Required. Spec for pairwise summarization quality score metric
   * ({@code .google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualitySpec metric_spec = 1}).
   *
   * @return Whether the metricSpec field is set.
   */
  public boolean hasMetricSpec() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   * Required. Spec for pairwise summarization quality score metric.
   *
   * @return The metricSpec (default instance when unset).
   */
  public com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualitySpec getMetricSpec() {
    if (metricSpecBuilder_ == null) {
      return metricSpec_ == null
          ? com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualitySpec
              .getDefaultInstance()
          : metricSpec_;
    } else {
      return metricSpecBuilder_.getMessage();
    }
  }

  /** Sets metric_spec; rejects null. */
  public Builder setMetricSpec(
      com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualitySpec value) {
    if (metricSpecBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      metricSpec_ = value;
    } else {
      metricSpecBuilder_.setMessage(value);
    }
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }

  /** Sets metric_spec from a sub-builder. */
  public Builder setMetricSpec(
      com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualitySpec.Builder
          builderForValue) {
    if (metricSpecBuilder_ == null) {
      metricSpec_ = builderForValue.build();
    } else {
      metricSpecBuilder_.setMessage(builderForValue.build());
    }
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }

  /** Merges into metric_spec; replaces wholesale when currently unset/default. */
  public Builder mergeMetricSpec(
      com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualitySpec value) {
    if (metricSpecBuilder_ == null) {
      if (((bitField0_ & 0x00000001) != 0)
          && metricSpec_ != null
          && metricSpec_
              != com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualitySpec
                  .getDefaultInstance()) {
        getMetricSpecBuilder().mergeFrom(value);
      } else {
        metricSpec_ = value;
      }
    } else {
      metricSpecBuilder_.mergeFrom(value);
    }
    if (metricSpec_ != null) {
      bitField0_ |= 0x00000001;
      onChanged();
    }
    return this;
  }

  /** Clears metric_spec and disposes any nested builder. */
  public Builder clearMetricSpec() {
    bitField0_ = (bitField0_ & ~0x00000001);
    metricSpec_ = null;
    if (metricSpecBuilder_ != null) {
      metricSpecBuilder_.dispose();
      metricSpecBuilder_ = null;
    }
    onChanged();
    return this;
  }

  /** Returns a mutable sub-builder for metric_spec, marking the field set. */
  public com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualitySpec.Builder
      getMetricSpecBuilder() {
    bitField0_ |= 0x00000001;
    onChanged();
    return getMetricSpecFieldBuilder().getBuilder();
  }

  /** Read-only view of metric_spec (builder if present, else message/default). */
  public com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualitySpecOrBuilder
      getMetricSpecOrBuilder() {
    if (metricSpecBuilder_ != null) {
      return metricSpecBuilder_.getMessageOrBuilder();
    } else {
      return metricSpec_ == null
          ? com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualitySpec
              .getDefaultInstance()
          : metricSpec_;
    }
  }

  // Lazily creates the single-field builder for metric_spec; once created,
  // the plain field reference is nulled and the builder owns the value.
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualitySpec,
          com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualitySpec.Builder,
          com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualitySpecOrBuilder>
      getMetricSpecFieldBuilder() {
    if (metricSpecBuilder_ == null) {
      metricSpecBuilder_ =
          new com.google.protobuf.SingleFieldBuilderV3<
              com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualitySpec,
              com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualitySpec.Builder,
              com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualitySpecOrBuilder>(
              getMetricSpec(), getParentForChildren(), isClean());
      metricSpec_ = null;
    }
    return metricSpecBuilder_;
  }

  private com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInstance instance_;
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInstance,
          com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInstance.Builder,
          com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInstanceOrBuilder>
      instanceBuilder_;

  /**
   * Required. Pairwise summarization quality instance
   * ({@code .google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInstance instance = 2}).
   *
   * @return Whether the instance field is set.
   */
  public boolean hasInstance() {
    return ((bitField0_ & 0x00000002) != 0);
  }

  /**
   * Required. Pairwise summarization quality instance.
   *
   * @return The instance (default instance when unset).
   */
  public com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInstance getInstance() {
    if (instanceBuilder_ == null) {
      return instance_ == null
          ? com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInstance
              .getDefaultInstance()
          : instance_;
    } else {
      return instanceBuilder_.getMessage();
    }
  }

  /** Sets instance; rejects null. */
  public Builder setInstance(
      com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInstance value) {
    if (instanceBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      instance_ = value;
    } else {
      instanceBuilder_.setMessage(value);
    }
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  /** Sets instance from a sub-builder. */
  public Builder setInstance(
      com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInstance.Builder
          builderForValue) {
    if (instanceBuilder_ == null) {
      instance_ = builderForValue.build();
    } else {
      instanceBuilder_.setMessage(builderForValue.build());
    }
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  /** Merges into instance; replaces wholesale when currently unset/default. */
  public Builder mergeInstance(
      com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInstance value) {
    if (instanceBuilder_ == null) {
      if (((bitField0_ & 0x00000002) != 0)
          && instance_ != null
          && instance_
              != com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInstance
                  .getDefaultInstance()) {
        getInstanceBuilder().mergeFrom(value);
      } else {
        instance_ = value;
      }
    } else {
      instanceBuilder_.mergeFrom(value);
    }
    if (instance_ != null) {
      bitField0_ |= 0x00000002;
      onChanged();
    }
    return this;
  }

  /** Clears instance and disposes any nested builder. */
  public Builder clearInstance() {
    bitField0_ = (bitField0_ & ~0x00000002);
    instance_ = null;
    if (instanceBuilder_ != null) {
      instanceBuilder_.dispose();
      instanceBuilder_ = null;
    }
    onChanged();
    return this;
  }

  /** Returns a mutable sub-builder for instance, marking the field set. */
  public com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInstance.Builder
      getInstanceBuilder() {
    bitField0_ |= 0x00000002;
    onChanged();
    return getInstanceFieldBuilder().getBuilder();
  }

  /** Read-only view of instance (builder if present, else message/default). */
  public com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInstanceOrBuilder
      getInstanceOrBuilder() {
    if (instanceBuilder_ != null) {
      return instanceBuilder_.getMessageOrBuilder();
    } else {
      return instance_ == null
          ? com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInstance
              .getDefaultInstance()
          : instance_;
    }
  }

  // Lazily creates the single-field builder for instance (same pattern as
  // getMetricSpecFieldBuilder above).
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInstance,
          com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInstance.Builder,
          com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInstanceOrBuilder>
      getInstanceFieldBuilder() {
    if (instanceBuilder_ == null) {
      instanceBuilder_ =
          new com.google.protobuf.SingleFieldBuilderV3<
              com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInstance,
              com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInstance.Builder,
              com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInstanceOrBuilder>(
              getInstance(), getParentForChildren(), isClean());
      instance_ = null;
    }
    return instanceBuilder_;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput)
}

// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput)
private static final com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput
    DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput();
}

public static com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput
    getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Parser delegates to the Builder's wire-format mergeFrom; partial messages
// are attached to thrown InvalidProtocolBufferExceptions for diagnostics.
private static final com.google.protobuf.Parser<PairwiseSummarizationQualityInput> PARSER =
    new com.google.protobuf.AbstractParser<PairwiseSummarizationQualityInput>() {
      @java.lang.Override
      public PairwiseSummarizationQualityInput parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<PairwiseSummarizationQualityInput> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<PairwiseSummarizationQualityInput> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput
    getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// NOTE(review): the three lines below are dataset-join residue (repo id, file
// size, and path of the next concatenated file) — not Java source. Preserved
// here as a comment so the file remains parseable; this marks the boundary
// before QuestionAnsweringCorrectnessInput.java.
//   googleapis/google-cloud-java
//   37,224
//   java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/QuestionAnsweringCorrectnessInput.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1beta1/evaluation_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1beta1; /** * * * <pre> * Input for question answering correctness metric. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput} */ public final class QuestionAnsweringCorrectnessInput extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput) QuestionAnsweringCorrectnessInputOrBuilder { private static final long serialVersionUID = 0L; // Use QuestionAnsweringCorrectnessInput.newBuilder() to construct. 
// Message members of QuestionAnsweringCorrectnessInput (class header and
// license are above; the class continues past this chunk). Generated code.
private QuestionAnsweringCorrectnessInput(
    com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

private QuestionAnsweringCorrectnessInput() {}

@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new QuestionAnsweringCorrectnessInput();
}

public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
      .internal_static_google_cloud_aiplatform_v1beta1_QuestionAnsweringCorrectnessInput_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
      .internal_static_google_cloud_aiplatform_v1beta1_QuestionAnsweringCorrectnessInput_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput.class,
          com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput.Builder.class);
}

// Presence bits: 0x1 = metric_spec, 0x2 = instance.
private int bitField0_;

public static final int METRIC_SPEC_FIELD_NUMBER = 1;
private com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessSpec metricSpec_;

/**
 * Required. Spec for question answering correctness score metric
 * ({@code .google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessSpec metric_spec = 1}).
 *
 * @return Whether the metricSpec field is set.
 */
@java.lang.Override
public boolean hasMetricSpec() {
  return ((bitField0_ & 0x00000001) != 0);
}

/**
 * Required. Spec for question answering correctness score metric.
 *
 * @return The metricSpec (default instance when unset).
 */
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessSpec getMetricSpec() {
  return metricSpec_ == null
      ? com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessSpec.getDefaultInstance()
      : metricSpec_;
}

/** Read-only view of metric_spec. */
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessSpecOrBuilder
    getMetricSpecOrBuilder() {
  return metricSpec_ == null
      ? com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessSpec.getDefaultInstance()
      : metricSpec_;
}

public static final int INSTANCE_FIELD_NUMBER = 2;
private com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInstance instance_;

/**
 * Required. Question answering correctness instance
 * ({@code .google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInstance instance = 2}).
 *
 * @return Whether the instance field is set.
 */
@java.lang.Override
public boolean hasInstance() {
  return ((bitField0_ & 0x00000002) != 0);
}

/**
 * Required. Question answering correctness instance.
 *
 * @return The instance (default instance when unset).
 */
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInstance getInstance() {
  return instance_ == null
      ? com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInstance
          .getDefaultInstance()
      : instance_;
}

/** Read-only view of instance. */
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInstanceOrBuilder
    getInstanceOrBuilder() {
  return instance_ == null
      ? com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInstance
          .getDefaultInstance()
      : instance_;
}

// Memoized initialization check (-1 = unknown, 0 = false, 1 = true).
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}

// Serializes only the fields whose presence bit is set, then unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeMessage(1, getMetricSpec());
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    output.writeMessage(2, getInstance());
  }
  getUnknownFields().writeTo(output);
}

// Memoized wire size, mirroring writeTo.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  if (((bitField0_ & 0x00000001) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getMetricSpec());
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getInstance());
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}

// Field-wise equality: presence must match, then values, then unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput)) {
    return super.equals(obj);
  }
  com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput other =
      (com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput) obj;

  if (hasMetricSpec() != other.hasMetricSpec()) return false;
  if (hasMetricSpec()) {
    if (!getMetricSpec().equals(other.getMetricSpec())) return false;
  }
  if (hasInstance() != other.hasInstance()) return false;
  if (hasInstance()) {
    if (!getInstance().equals(other.getInstance())) return false;
  }
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

// Memoized hash; same mixing scheme as the other generated messages.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (hasMetricSpec()) {
    hash = (37 * hash) + METRIC_SPEC_FIELD_NUMBER;
    hash = (53 * hash) + getMetricSpec().hashCode();
  }
  if (hasInstance()) {
    hash = (37 * hash) + INSTANCE_FIELD_NUMBER;
    hash = (53 * hash) + getInstance().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}

// Standard generated parse entry points; all delegate to PARSER (declared in
// the class-scope section of this file).
public static com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput parseFrom(
    byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static
    com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput parseFrom(
        byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

// Stream overloads route through GeneratedMessageV3's IOException helpers.
public static com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput
    parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput
    parseDelimitedFrom(
        java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
@java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Input for question answering correctness metric. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput) com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInputOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto .internal_static_google_cloud_aiplatform_v1beta1_QuestionAnsweringCorrectnessInput_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto .internal_static_google_cloud_aiplatform_v1beta1_QuestionAnsweringCorrectnessInput_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput.class, com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput.Builder.class); } // Construct using // com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput.newBuilder() private Builder() { 
maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getMetricSpecFieldBuilder(); getInstanceFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; metricSpec_ = null; if (metricSpecBuilder_ != null) { metricSpecBuilder_.dispose(); metricSpecBuilder_ = null; } instance_ = null; if (instanceBuilder_ != null) { instanceBuilder_.dispose(); instanceBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto .internal_static_google_cloud_aiplatform_v1beta1_QuestionAnsweringCorrectnessInput_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput .getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput build() { com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput buildPartial() { com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput result = new com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) 
{ result.metricSpec_ = metricSpecBuilder_ == null ? metricSpec_ : metricSpecBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.instance_ = instanceBuilder_ == null ? instance_ : instanceBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput) { return mergeFrom( (com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput other) { if (other == com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput .getDefaultInstance()) return this; if (other.hasMetricSpec()) { mergeMetricSpec(other.getMetricSpec()); } if (other.hasInstance()) { mergeInstance(other.getInstance()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override 
public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getMetricSpecFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getInstanceFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessSpec metricSpec_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessSpec, com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessSpec.Builder, com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessSpecOrBuilder> metricSpecBuilder_; /** * * * <pre> * Required. Spec for question answering correctness score metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessSpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the metricSpec field is set. */ public boolean hasMetricSpec() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. Spec for question answering correctness score metric. 
* </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessSpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The metricSpec. */ public com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessSpec getMetricSpec() { if (metricSpecBuilder_ == null) { return metricSpec_ == null ? com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessSpec .getDefaultInstance() : metricSpec_; } else { return metricSpecBuilder_.getMessage(); } } /** * * * <pre> * Required. Spec for question answering correctness score metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessSpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setMetricSpec( com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessSpec value) { if (metricSpecBuilder_ == null) { if (value == null) { throw new NullPointerException(); } metricSpec_ = value; } else { metricSpecBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Spec for question answering correctness score metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessSpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setMetricSpec( com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessSpec.Builder builderForValue) { if (metricSpecBuilder_ == null) { metricSpec_ = builderForValue.build(); } else { metricSpecBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Spec for question answering correctness score metric. 
* </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessSpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeMetricSpec( com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessSpec value) { if (metricSpecBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && metricSpec_ != null && metricSpec_ != com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessSpec .getDefaultInstance()) { getMetricSpecBuilder().mergeFrom(value); } else { metricSpec_ = value; } } else { metricSpecBuilder_.mergeFrom(value); } if (metricSpec_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. Spec for question answering correctness score metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessSpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearMetricSpec() { bitField0_ = (bitField0_ & ~0x00000001); metricSpec_ = null; if (metricSpecBuilder_ != null) { metricSpecBuilder_.dispose(); metricSpecBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. Spec for question answering correctness score metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessSpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessSpec.Builder getMetricSpecBuilder() { bitField0_ |= 0x00000001; onChanged(); return getMetricSpecFieldBuilder().getBuilder(); } /** * * * <pre> * Required. Spec for question answering correctness score metric. 
* </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessSpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessSpecOrBuilder getMetricSpecOrBuilder() { if (metricSpecBuilder_ != null) { return metricSpecBuilder_.getMessageOrBuilder(); } else { return metricSpec_ == null ? com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessSpec .getDefaultInstance() : metricSpec_; } } /** * * * <pre> * Required. Spec for question answering correctness score metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessSpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessSpec, com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessSpec.Builder, com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessSpecOrBuilder> getMetricSpecFieldBuilder() { if (metricSpecBuilder_ == null) { metricSpecBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessSpec, com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessSpec.Builder, com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessSpecOrBuilder>( getMetricSpec(), getParentForChildren(), isClean()); metricSpec_ = null; } return metricSpecBuilder_; } private com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInstance instance_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInstance, com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInstance.Builder, com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInstanceOrBuilder> instanceBuilder_; /** * * * <pre> * Required. Question answering correctness instance. 
* </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInstance instance = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the instance field is set. */ public boolean hasInstance() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. Question answering correctness instance. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInstance instance = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The instance. */ public com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInstance getInstance() { if (instanceBuilder_ == null) { return instance_ == null ? com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInstance .getDefaultInstance() : instance_; } else { return instanceBuilder_.getMessage(); } } /** * * * <pre> * Required. Question answering correctness instance. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInstance instance = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setInstance( com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInstance value) { if (instanceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } instance_ = value; } else { instanceBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Question answering correctness instance. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInstance instance = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setInstance( com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInstance.Builder builderForValue) { if (instanceBuilder_ == null) { instance_ = builderForValue.build(); } else { instanceBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. 
Question answering correctness instance. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInstance instance = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeInstance( com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInstance value) { if (instanceBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && instance_ != null && instance_ != com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInstance .getDefaultInstance()) { getInstanceBuilder().mergeFrom(value); } else { instance_ = value; } } else { instanceBuilder_.mergeFrom(value); } if (instance_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. Question answering correctness instance. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInstance instance = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearInstance() { bitField0_ = (bitField0_ & ~0x00000002); instance_ = null; if (instanceBuilder_ != null) { instanceBuilder_.dispose(); instanceBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. Question answering correctness instance. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInstance instance = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInstance.Builder getInstanceBuilder() { bitField0_ |= 0x00000002; onChanged(); return getInstanceFieldBuilder().getBuilder(); } /** * * * <pre> * Required. Question answering correctness instance. 
* </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInstance instance = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInstanceOrBuilder getInstanceOrBuilder() { if (instanceBuilder_ != null) { return instanceBuilder_.getMessageOrBuilder(); } else { return instance_ == null ? com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInstance .getDefaultInstance() : instance_; } } /** * * * <pre> * Required. Question answering correctness instance. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInstance instance = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInstance, com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInstance.Builder, com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInstanceOrBuilder> getInstanceFieldBuilder() { if (instanceBuilder_ == null) { instanceBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInstance, com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInstance.Builder, com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInstanceOrBuilder>( getInstance(), getParentForChildren(), isClean()); instance_ = null; } return instanceBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput) } // 
@@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput) private static final com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput(); } public static com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<QuestionAnsweringCorrectnessInput> PARSER = new com.google.protobuf.AbstractParser<QuestionAnsweringCorrectnessInput>() { @java.lang.Override public QuestionAnsweringCorrectnessInput parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<QuestionAnsweringCorrectnessInput> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<QuestionAnsweringCorrectnessInput> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,224
java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/QuestionAnsweringHelpfulnessInput.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1beta1/evaluation_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1beta1; /** * * * <pre> * Input for question answering helpfulness metric. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput} */ public final class QuestionAnsweringHelpfulnessInput extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput) QuestionAnsweringHelpfulnessInputOrBuilder { private static final long serialVersionUID = 0L; // Use QuestionAnsweringHelpfulnessInput.newBuilder() to construct. 
private QuestionAnsweringHelpfulnessInput( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private QuestionAnsweringHelpfulnessInput() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new QuestionAnsweringHelpfulnessInput(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto .internal_static_google_cloud_aiplatform_v1beta1_QuestionAnsweringHelpfulnessInput_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto .internal_static_google_cloud_aiplatform_v1beta1_QuestionAnsweringHelpfulnessInput_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput.class, com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput.Builder.class); } private int bitField0_; public static final int METRIC_SPEC_FIELD_NUMBER = 1; private com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpec metricSpec_; /** * * * <pre> * Required. Spec for question answering helpfulness score metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the metricSpec field is set. */ @java.lang.Override public boolean hasMetricSpec() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. Spec for question answering helpfulness score metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The metricSpec. 
*/ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpec getMetricSpec() { return metricSpec_ == null ? com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpec.getDefaultInstance() : metricSpec_; } /** * * * <pre> * Required. Spec for question answering helpfulness score metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpecOrBuilder getMetricSpecOrBuilder() { return metricSpec_ == null ? com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpec.getDefaultInstance() : metricSpec_; } public static final int INSTANCE_FIELD_NUMBER = 2; private com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstance instance_; /** * * * <pre> * Required. Question answering helpfulness instance. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstance instance = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the instance field is set. */ @java.lang.Override public boolean hasInstance() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. Question answering helpfulness instance. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstance instance = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The instance. */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstance getInstance() { return instance_ == null ? com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstance .getDefaultInstance() : instance_; } /** * * * <pre> * Required. Question answering helpfulness instance. 
* </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstance instance = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstanceOrBuilder getInstanceOrBuilder() { return instance_ == null ? com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstance .getDefaultInstance() : instance_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getMetricSpec()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getInstance()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getMetricSpec()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getInstance()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput)) { return super.equals(obj); } com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput other = (com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput) obj; if (hasMetricSpec() != other.hasMetricSpec()) return false; if (hasMetricSpec()) { if (!getMetricSpec().equals(other.getMetricSpec())) return false; } 
if (hasInstance() != other.hasInstance()) return false; if (hasInstance()) { if (!getInstance().equals(other.getInstance())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasMetricSpec()) { hash = (37 * hash) + METRIC_SPEC_FIELD_NUMBER; hash = (53 * hash) + getMetricSpec().hashCode(); } if (hasInstance()) { hash = (37 * hash) + INSTANCE_FIELD_NUMBER; hash = (53 * hash) + getInstance().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } 
// NOTE(review): machine-generated by protoc — do not hand-edit; regenerate from the .proto.

// Builder factory methods.
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(
    com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}

/**
 *
 *
 * <pre>
 * Input for question answering helpfulness metric.
 * </pre>
 *
 * Protobuf type {@code google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput)
    com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInputOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
        .internal_static_google_cloud_aiplatform_v1beta1_QuestionAnsweringHelpfulnessInput_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
        .internal_static_google_cloud_aiplatform_v1beta1_QuestionAnsweringHelpfulnessInput_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput.class,
            com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput.Builder.class);
  }

  // Construct using
  // com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }

  // Eagerly creates nested field builders when the runtime requests it.
  private void maybeForceBuilderInitialization() {
    if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
      getMetricSpecFieldBuilder();
      getInstanceFieldBuilder();
    }
  }

  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    metricSpec_ = null;
    if (metricSpecBuilder_ != null) {
      metricSpecBuilder_.dispose();
      metricSpecBuilder_ = null;
    }
    instance_ = null;
    if (instanceBuilder_ != null) {
      instanceBuilder_.dispose();
      instanceBuilder_ = null;
    }
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
        .internal_static_google_cloud_aiplatform_v1beta1_QuestionAnsweringHelpfulnessInput_descriptor;
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput
      getDefaultInstanceForType() {
    return com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput
        .getDefaultInstance();
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput build() {
    com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput buildPartial() {
    com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput result =
        new com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput(this);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }

  // Copies set fields from the builder into `result`, carrying presence bits across.
  private void buildPartial0(
      com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput result) {
    int from_bitField0_ = bitField0_;
    int to_bitField0_ = 0;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      result.metricSpec_ = metricSpecBuilder_ == null ? metricSpec_ : metricSpecBuilder_.build();
      to_bitField0_ |= 0x00000001;
    }
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.instance_ = instanceBuilder_ == null ? instance_ : instanceBuilder_.build();
      to_bitField0_ |= 0x00000002;
    }
    result.bitField0_ |= to_bitField0_;
  }

  // Boilerplate delegations to the generated-message base builder.
  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput) {
      return mergeFrom(
          (com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  public Builder mergeFrom(
      com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput other) {
    if (other
        == com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput
            .getDefaultInstance()) return this;
    if (other.hasMetricSpec()) {
      mergeMetricSpec(other.getMetricSpec());
    }
    if (other.hasInstance()) {
      mergeInstance(other.getInstance());
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  // Wire-format parse loop: tag 10 = metric_spec (field 1), tag 18 = instance (field 2).
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              input.readMessage(getMetricSpecFieldBuilder().getBuilder(), extensionRegistry);
              bitField0_ |= 0x00000001;
              break;
            } // case 10
          case 18:
            {
              input.readMessage(getInstanceFieldBuilder().getBuilder(), extensionRegistry);
              bitField0_ |= 0x00000002;
              break;
            } // case 18
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }

  private int bitField0_;

  private com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpec metricSpec_;
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpec,
          com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpec.Builder,
          com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpecOrBuilder>
      metricSpecBuilder_;

  /**
   *
   *
   * <pre>
   * Required. Spec for question answering helpfulness score metric.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the metricSpec field is set.
   */
  public boolean hasMetricSpec() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   *
   *
   * <pre>
   * Required. Spec for question answering helpfulness score metric.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The metricSpec.
   */
  // NOTE(review): machine-generated accessors for the `metric_spec` message field —
  // do not hand-edit; regenerate from the .proto.
  public com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpec getMetricSpec() {
    if (metricSpecBuilder_ == null) {
      return metricSpec_ == null
          ? com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpec
              .getDefaultInstance()
          : metricSpec_;
    } else {
      return metricSpecBuilder_.getMessage();
    }
  }

  /**
   *
   *
   * <pre>
   * Required. Spec for question answering helpfulness score metric.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder setMetricSpec(
      com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpec value) {
    if (metricSpecBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      metricSpec_ = value;
    } else {
      metricSpecBuilder_.setMessage(value);
    }
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Required. Spec for question answering helpfulness score metric.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder setMetricSpec(
      com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpec.Builder
          builderForValue) {
    if (metricSpecBuilder_ == null) {
      metricSpec_ = builderForValue.build();
    } else {
      metricSpecBuilder_.setMessage(builderForValue.build());
    }
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Required. Spec for question answering helpfulness score metric.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder mergeMetricSpec(
      com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpec value) {
    if (metricSpecBuilder_ == null) {
      if (((bitField0_ & 0x00000001) != 0)
          && metricSpec_ != null
          && metricSpec_
              != com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpec
                  .getDefaultInstance()) {
        getMetricSpecBuilder().mergeFrom(value);
      } else {
        metricSpec_ = value;
      }
    } else {
      metricSpecBuilder_.mergeFrom(value);
    }
    if (metricSpec_ != null) {
      bitField0_ |= 0x00000001;
      onChanged();
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * Required. Spec for question answering helpfulness score metric.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder clearMetricSpec() {
    bitField0_ = (bitField0_ & ~0x00000001);
    metricSpec_ = null;
    if (metricSpecBuilder_ != null) {
      metricSpecBuilder_.dispose();
      metricSpecBuilder_ = null;
    }
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Required. Spec for question answering helpfulness score metric.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpec.Builder
      getMetricSpecBuilder() {
    bitField0_ |= 0x00000001;
    onChanged();
    return getMetricSpecFieldBuilder().getBuilder();
  }

  /**
   *
   *
   * <pre>
   * Required. Spec for question answering helpfulness score metric.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpecOrBuilder
      getMetricSpecOrBuilder() {
    if (metricSpecBuilder_ != null) {
      return metricSpecBuilder_.getMessageOrBuilder();
    } else {
      return metricSpec_ == null
          ? com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpec
              .getDefaultInstance()
          : metricSpec_;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. Spec for question answering helpfulness score metric.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpec,
          com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpec.Builder,
          com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpecOrBuilder>
      getMetricSpecFieldBuilder() {
    if (metricSpecBuilder_ == null) {
      metricSpecBuilder_ =
          new com.google.protobuf.SingleFieldBuilderV3<
              com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpec,
              com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpec.Builder,
              com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessSpecOrBuilder>(
              getMetricSpec(), getParentForChildren(), isClean());
      metricSpec_ = null;
    }
    return metricSpecBuilder_;
  }

  private com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstance instance_;
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstance,
          com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstance.Builder,
          com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstanceOrBuilder>
      instanceBuilder_;

  /**
   *
   *
   * <pre>
   * Required. Question answering helpfulness instance.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the instance field is set.
   */
  // NOTE(review): machine-generated accessors for the `instance` message field —
  // do not hand-edit; regenerate from the .proto.
  public boolean hasInstance() {
    return ((bitField0_ & 0x00000002) != 0);
  }

  /**
   *
   *
   * <pre>
   * Required. Question answering helpfulness instance.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The instance.
   */
  public com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstance getInstance() {
    if (instanceBuilder_ == null) {
      return instance_ == null
          ? com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstance
              .getDefaultInstance()
          : instance_;
    } else {
      return instanceBuilder_.getMessage();
    }
  }

  /**
   *
   *
   * <pre>
   * Required. Question answering helpfulness instance.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder setInstance(
      com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstance value) {
    if (instanceBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      instance_ = value;
    } else {
      instanceBuilder_.setMessage(value);
    }
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Required. Question answering helpfulness instance.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder setInstance(
      com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstance.Builder
          builderForValue) {
    if (instanceBuilder_ == null) {
      instance_ = builderForValue.build();
    } else {
      instanceBuilder_.setMessage(builderForValue.build());
    }
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Required. Question answering helpfulness instance.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder mergeInstance(
      com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstance value) {
    if (instanceBuilder_ == null) {
      if (((bitField0_ & 0x00000002) != 0)
          && instance_ != null
          && instance_
              != com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstance
                  .getDefaultInstance()) {
        getInstanceBuilder().mergeFrom(value);
      } else {
        instance_ = value;
      }
    } else {
      instanceBuilder_.mergeFrom(value);
    }
    if (instance_ != null) {
      bitField0_ |= 0x00000002;
      onChanged();
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * Required. Question answering helpfulness instance.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder clearInstance() {
    bitField0_ = (bitField0_ & ~0x00000002);
    instance_ = null;
    if (instanceBuilder_ != null) {
      instanceBuilder_.dispose();
      instanceBuilder_ = null;
    }
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Required. Question answering helpfulness instance.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstance.Builder
      getInstanceBuilder() {
    bitField0_ |= 0x00000002;
    onChanged();
    return getInstanceFieldBuilder().getBuilder();
  }

  /**
   *
   *
   * <pre>
   * Required. Question answering helpfulness instance.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstanceOrBuilder
      getInstanceOrBuilder() {
    if (instanceBuilder_ != null) {
      return instanceBuilder_.getMessageOrBuilder();
    } else {
      return instance_ == null
          ? com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstance
              .getDefaultInstance()
          : instance_;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. Question answering helpfulness instance.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstance,
          com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstance.Builder,
          com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstanceOrBuilder>
      getInstanceFieldBuilder() {
    if (instanceBuilder_ == null) {
      instanceBuilder_ =
          new com.google.protobuf.SingleFieldBuilderV3<
              com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstance,
              com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstance.Builder,
              com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInstanceOrBuilder>(
              getInstance(), getParentForChildren(), isClean());
      instance_ = null;
    }
    return instanceBuilder_;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput)
}

// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput)
// Singleton default instance and the message parser used by all parseFrom overloads.
private static final com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput
    DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput();
}

public static com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput
    getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

private static final com.google.protobuf.Parser<QuestionAnsweringHelpfulnessInput> PARSER =
    new com.google.protobuf.AbstractParser<QuestionAnsweringHelpfulnessInput>() {
      @java.lang.Override
      public QuestionAnsweringHelpfulnessInput parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<QuestionAnsweringHelpfulnessInput> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<QuestionAnsweringHelpfulnessInput> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput
    getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
googleapis/google-cloud-java
37,080
java-functions/proto-google-cloud-functions-v2/src/main/java/com/google/cloud/functions/v2/SecretEnvVar.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/functions/v2/functions.proto

// Protobuf Java Version: 3.25.8
package com.google.cloud.functions.v2;

/**
 *
 *
 * <pre>
 * Configuration for a secret environment variable. It has the information
 * necessary to fetch the secret value from secret manager and expose it as an
 * environment variable.
 * </pre>
 *
 * Protobuf type {@code google.cloud.functions.v2.SecretEnvVar}
 */
public final class SecretEnvVar extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.functions.v2.SecretEnvVar)
    SecretEnvVarOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use SecretEnvVar.newBuilder() to construct.
  private SecretEnvVar(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private SecretEnvVar() {
    key_ = "";
    projectId_ = "";
    secret_ = "";
    version_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new SecretEnvVar();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.functions.v2.FunctionsProto
        .internal_static_google_cloud_functions_v2_SecretEnvVar_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.functions.v2.FunctionsProto
        .internal_static_google_cloud_functions_v2_SecretEnvVar_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.functions.v2.SecretEnvVar.class,
            com.google.cloud.functions.v2.SecretEnvVar.Builder.class);
  }

  public static final int KEY_FIELD_NUMBER = 1;

  // Stored as String or ByteString; lazily converted and cached by the getters below.
  @SuppressWarnings("serial")
  private volatile java.lang.Object key_ = "";

  /**
   *
   *
   * <pre>
   * Name of the environment variable.
   * </pre>
   *
   * <code>string key = 1;</code>
   *
   * @return The key.
   */
  @java.lang.Override
  public java.lang.String getKey() {
    java.lang.Object ref = key_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      key_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Name of the environment variable.
   * </pre>
   *
   * <code>string key = 1;</code>
   *
   * @return The bytes for key.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getKeyBytes() {
    java.lang.Object ref = key_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      key_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int PROJECT_ID_FIELD_NUMBER = 2;

  @SuppressWarnings("serial")
  private volatile java.lang.Object projectId_ = "";

  /**
   *
   *
   * <pre>
   * Project identifier (preferably project number but can also be the
   * project ID) of the project that contains the secret. If not set, it is
   * assumed that the secret is in the same project as the function.
   * </pre>
   *
   * <code>string project_id = 2;</code>
   *
   * @return The projectId.
   */
  @java.lang.Override
  public java.lang.String getProjectId() {
    java.lang.Object ref = projectId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      projectId_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Project identifier (preferably project number but can also be the
   * project ID) of the project that contains the secret. If not set, it is
   * assumed that the secret is in the same project as the function.
   * </pre>
   *
   * <code>string project_id = 2;</code>
   *
   * @return The bytes for projectId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getProjectIdBytes() {
    java.lang.Object ref = projectId_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      projectId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int SECRET_FIELD_NUMBER = 3;

  @SuppressWarnings("serial")
  private volatile java.lang.Object secret_ = "";

  /**
   *
   *
   * <pre>
   * Name of the secret in secret manager (not the full resource name).
   * </pre>
   *
   * <code>string secret = 3;</code>
   *
   * @return The secret.
   */
  @java.lang.Override
  public java.lang.String getSecret() {
    java.lang.Object ref = secret_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      secret_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Name of the secret in secret manager (not the full resource name).
   * </pre>
   *
   * <code>string secret = 3;</code>
   *
   * @return The bytes for secret.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getSecretBytes() {
    java.lang.Object ref = secret_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      secret_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int VERSION_FIELD_NUMBER = 4;

  @SuppressWarnings("serial")
  private volatile java.lang.Object version_ = "";

  /**
   *
   *
   * <pre>
   * Version of the secret (version number or the string 'latest'). It is
   * recommended to use a numeric version for secret environment variables as
   * any updates to the secret value is not reflected until new instances
   * start.
   * </pre>
   *
   * <code>string version = 4;</code>
   *
   * @return The version.
   */
  @java.lang.Override
  public java.lang.String getVersion() {
    java.lang.Object ref = version_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      version_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Version of the secret (version number or the string 'latest'). It is
   * recommended to use a numeric version for secret environment variables as
   * any updates to the secret value is not reflected until new instances
   * start.
   * </pre>
   *
   * <code>string version = 4;</code>
   *
   * @return The bytes for version.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getVersionBytes() {
    java.lang.Object ref = version_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      version_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes each non-empty string field with its wire field number (1..4).
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(key_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, key_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(projectId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, projectId_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(secret_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, secret_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(version_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, version_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(key_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, key_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(projectId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, projectId_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(secret_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, secret_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(version_)) {
      size +=
          com.google.protobuf.GeneratedMessageV3.computeStringSize(4, version_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  // NOTE(review): machine-generated by protoc — do not hand-edit; regenerate from the .proto.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.functions.v2.SecretEnvVar)) {
      return super.equals(obj);
    }
    com.google.cloud.functions.v2.SecretEnvVar other =
        (com.google.cloud.functions.v2.SecretEnvVar) obj;

    if (!getKey().equals(other.getKey())) return false;
    if (!getProjectId().equals(other.getProjectId())) return false;
    if (!getSecret().equals(other.getSecret())) return false;
    if (!getVersion().equals(other.getVersion())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + KEY_FIELD_NUMBER;
    hash = (53 * hash) + getKey().hashCode();
    hash = (37 * hash) + PROJECT_ID_FIELD_NUMBER;
    hash = (53 * hash) + getProjectId().hashCode();
    hash = (37 * hash) + SECRET_FIELD_NUMBER;
    hash = (53 * hash) + getSecret().hashCode();
    hash = (37 * hash) + VERSION_FIELD_NUMBER;
    hash = (53 * hash) + getVersion().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Standard static parse entry points; all delegate to PARSER.
  public static com.google.cloud.functions.v2.SecretEnvVar parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.functions.v2.SecretEnvVar parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.functions.v2.SecretEnvVar parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.functions.v2.SecretEnvVar parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.functions.v2.SecretEnvVar parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.functions.v2.SecretEnvVar parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.functions.v2.SecretEnvVar parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.functions.v2.SecretEnvVar parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.functions.v2.SecretEnvVar parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.functions.v2.SecretEnvVar parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.functions.v2.SecretEnvVar parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.functions.v2.SecretEnvVar parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.functions.v2.SecretEnvVar prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * Configuration for a secret environment variable. It has the information
   * necessary to fetch the secret value from secret manager and expose it as an
   * environment variable.
   * </pre>
   *
   * Protobuf type {@code google.cloud.functions.v2.SecretEnvVar}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.functions.v2.SecretEnvVar)
      com.google.cloud.functions.v2.SecretEnvVarOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.functions.v2.FunctionsProto
          .internal_static_google_cloud_functions_v2_SecretEnvVar_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.functions.v2.FunctionsProto
          .internal_static_google_cloud_functions_v2_SecretEnvVar_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.functions.v2.SecretEnvVar.class,
              com.google.cloud.functions.v2.SecretEnvVar.Builder.class);
    }

    // Construct using com.google.cloud.functions.v2.SecretEnvVar.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      key_ = "";
      projectId_ = "";
      secret_ = "";
      version_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.functions.v2.FunctionsProto
          .internal_static_google_cloud_functions_v2_SecretEnvVar_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.functions.v2.SecretEnvVar getDefaultInstanceForType() {
      return com.google.cloud.functions.v2.SecretEnvVar.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.functions.v2.SecretEnvVar build() {
      com.google.cloud.functions.v2.SecretEnvVar result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.functions.v2.SecretEnvVar buildPartial() {
com.google.cloud.functions.v2.SecretEnvVar result = new com.google.cloud.functions.v2.SecretEnvVar(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.functions.v2.SecretEnvVar result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.key_ = key_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.projectId_ = projectId_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.secret_ = secret_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.version_ = version_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.functions.v2.SecretEnvVar) { return mergeFrom((com.google.cloud.functions.v2.SecretEnvVar) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.functions.v2.SecretEnvVar other) { if (other == com.google.cloud.functions.v2.SecretEnvVar.getDefaultInstance()) return this; if (!other.getKey().isEmpty()) { key_ = other.key_; bitField0_ |= 0x00000001; 
onChanged(); } if (!other.getProjectId().isEmpty()) { projectId_ = other.projectId_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getSecret().isEmpty()) { secret_ = other.secret_; bitField0_ |= 0x00000004; onChanged(); } if (!other.getVersion().isEmpty()) { version_ = other.version_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { key_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { projectId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { secret_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 case 34: { version_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object key_ = ""; /** * * * <pre> * Name of the environment variable. * </pre> * * <code>string key = 1;</code> * * @return The key. 
*/ public java.lang.String getKey() { java.lang.Object ref = key_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); key_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Name of the environment variable. * </pre> * * <code>string key = 1;</code> * * @return The bytes for key. */ public com.google.protobuf.ByteString getKeyBytes() { java.lang.Object ref = key_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); key_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Name of the environment variable. * </pre> * * <code>string key = 1;</code> * * @param value The key to set. * @return This builder for chaining. */ public Builder setKey(java.lang.String value) { if (value == null) { throw new NullPointerException(); } key_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Name of the environment variable. * </pre> * * <code>string key = 1;</code> * * @return This builder for chaining. */ public Builder clearKey() { key_ = getDefaultInstance().getKey(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Name of the environment variable. * </pre> * * <code>string key = 1;</code> * * @param value The bytes for key to set. * @return This builder for chaining. */ public Builder setKeyBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); key_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object projectId_ = ""; /** * * * <pre> * Project identifier (preferably project number but can also be the * project ID) of the project that contains the secret. If not set, it is * assumed that the secret is in the same project as the function. 
* </pre> * * <code>string project_id = 2;</code> * * @return The projectId. */ public java.lang.String getProjectId() { java.lang.Object ref = projectId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); projectId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Project identifier (preferably project number but can also be the * project ID) of the project that contains the secret. If not set, it is * assumed that the secret is in the same project as the function. * </pre> * * <code>string project_id = 2;</code> * * @return The bytes for projectId. */ public com.google.protobuf.ByteString getProjectIdBytes() { java.lang.Object ref = projectId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); projectId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Project identifier (preferably project number but can also be the * project ID) of the project that contains the secret. If not set, it is * assumed that the secret is in the same project as the function. * </pre> * * <code>string project_id = 2;</code> * * @param value The projectId to set. * @return This builder for chaining. */ public Builder setProjectId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } projectId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Project identifier (preferably project number but can also be the * project ID) of the project that contains the secret. If not set, it is * assumed that the secret is in the same project as the function. * </pre> * * <code>string project_id = 2;</code> * * @return This builder for chaining. 
*/ public Builder clearProjectId() { projectId_ = getDefaultInstance().getProjectId(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Project identifier (preferably project number but can also be the * project ID) of the project that contains the secret. If not set, it is * assumed that the secret is in the same project as the function. * </pre> * * <code>string project_id = 2;</code> * * @param value The bytes for projectId to set. * @return This builder for chaining. */ public Builder setProjectIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); projectId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object secret_ = ""; /** * * * <pre> * Name of the secret in secret manager (not the full resource name). * </pre> * * <code>string secret = 3;</code> * * @return The secret. */ public java.lang.String getSecret() { java.lang.Object ref = secret_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); secret_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Name of the secret in secret manager (not the full resource name). * </pre> * * <code>string secret = 3;</code> * * @return The bytes for secret. */ public com.google.protobuf.ByteString getSecretBytes() { java.lang.Object ref = secret_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); secret_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Name of the secret in secret manager (not the full resource name). * </pre> * * <code>string secret = 3;</code> * * @param value The secret to set. * @return This builder for chaining. 
*/ public Builder setSecret(java.lang.String value) { if (value == null) { throw new NullPointerException(); } secret_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Name of the secret in secret manager (not the full resource name). * </pre> * * <code>string secret = 3;</code> * * @return This builder for chaining. */ public Builder clearSecret() { secret_ = getDefaultInstance().getSecret(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Name of the secret in secret manager (not the full resource name). * </pre> * * <code>string secret = 3;</code> * * @param value The bytes for secret to set. * @return This builder for chaining. */ public Builder setSecretBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); secret_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private java.lang.Object version_ = ""; /** * * * <pre> * Version of the secret (version number or the string 'latest'). It is * recommended to use a numeric version for secret environment variables as * any updates to the secret value is not reflected until new instances * start. * </pre> * * <code>string version = 4;</code> * * @return The version. */ public java.lang.String getVersion() { java.lang.Object ref = version_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); version_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Version of the secret (version number or the string 'latest'). It is * recommended to use a numeric version for secret environment variables as * any updates to the secret value is not reflected until new instances * start. * </pre> * * <code>string version = 4;</code> * * @return The bytes for version. 
*/ public com.google.protobuf.ByteString getVersionBytes() { java.lang.Object ref = version_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); version_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Version of the secret (version number or the string 'latest'). It is * recommended to use a numeric version for secret environment variables as * any updates to the secret value is not reflected until new instances * start. * </pre> * * <code>string version = 4;</code> * * @param value The version to set. * @return This builder for chaining. */ public Builder setVersion(java.lang.String value) { if (value == null) { throw new NullPointerException(); } version_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Version of the secret (version number or the string 'latest'). It is * recommended to use a numeric version for secret environment variables as * any updates to the secret value is not reflected until new instances * start. * </pre> * * <code>string version = 4;</code> * * @return This builder for chaining. */ public Builder clearVersion() { version_ = getDefaultInstance().getVersion(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * Version of the secret (version number or the string 'latest'). It is * recommended to use a numeric version for secret environment variables as * any updates to the secret value is not reflected until new instances * start. * </pre> * * <code>string version = 4;</code> * * @param value The bytes for version to set. * @return This builder for chaining. 
*/ public Builder setVersionBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); version_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.functions.v2.SecretEnvVar) } // @@protoc_insertion_point(class_scope:google.cloud.functions.v2.SecretEnvVar) private static final com.google.cloud.functions.v2.SecretEnvVar DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.functions.v2.SecretEnvVar(); } public static com.google.cloud.functions.v2.SecretEnvVar getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<SecretEnvVar> PARSER = new com.google.protobuf.AbstractParser<SecretEnvVar>() { @java.lang.Override public SecretEnvVar parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<SecretEnvVar> parser() { return PARSER; } @java.lang.Override public 
com.google.protobuf.Parser<SecretEnvVar> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.functions.v2.SecretEnvVar getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,126
java-visionai/proto-google-cloud-visionai-v1/src/main/java/com/google/cloud/visionai/v1/ResolveOperatorInfoRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/visionai/v1/lva_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.visionai.v1; /** * * * <pre> * Request message for querying operator info. * </pre> * * Protobuf type {@code google.cloud.visionai.v1.ResolveOperatorInfoRequest} */ public final class ResolveOperatorInfoRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.visionai.v1.ResolveOperatorInfoRequest) ResolveOperatorInfoRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ResolveOperatorInfoRequest.newBuilder() to construct. 
private ResolveOperatorInfoRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ResolveOperatorInfoRequest() { parent_ = ""; queries_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ResolveOperatorInfoRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.visionai.v1.LvaServiceProto .internal_static_google_cloud_visionai_v1_ResolveOperatorInfoRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.visionai.v1.LvaServiceProto .internal_static_google_cloud_visionai_v1_ResolveOperatorInfoRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.visionai.v1.ResolveOperatorInfoRequest.class, com.google.cloud.visionai.v1.ResolveOperatorInfoRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. Parent value for ResolveOperatorInfoRequest. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. Parent value for ResolveOperatorInfoRequest. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. 
*/ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int QUERIES_FIELD_NUMBER = 2; @SuppressWarnings("serial") private java.util.List<com.google.cloud.visionai.v1.OperatorQuery> queries_; /** * * * <pre> * Required. The operator queries. * </pre> * * <code> * repeated .google.cloud.visionai.v1.OperatorQuery queries = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public java.util.List<com.google.cloud.visionai.v1.OperatorQuery> getQueriesList() { return queries_; } /** * * * <pre> * Required. The operator queries. * </pre> * * <code> * repeated .google.cloud.visionai.v1.OperatorQuery queries = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.visionai.v1.OperatorQueryOrBuilder> getQueriesOrBuilderList() { return queries_; } /** * * * <pre> * Required. The operator queries. * </pre> * * <code> * repeated .google.cloud.visionai.v1.OperatorQuery queries = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public int getQueriesCount() { return queries_.size(); } /** * * * <pre> * Required. The operator queries. * </pre> * * <code> * repeated .google.cloud.visionai.v1.OperatorQuery queries = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.visionai.v1.OperatorQuery getQueries(int index) { return queries_.get(index); } /** * * * <pre> * Required. The operator queries. 
* </pre> * * <code> * repeated .google.cloud.visionai.v1.OperatorQuery queries = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.visionai.v1.OperatorQueryOrBuilder getQueriesOrBuilder(int index) { return queries_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } for (int i = 0; i < queries_.size(); i++) { output.writeMessage(2, queries_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } for (int i = 0; i < queries_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, queries_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.visionai.v1.ResolveOperatorInfoRequest)) { return super.equals(obj); } com.google.cloud.visionai.v1.ResolveOperatorInfoRequest other = (com.google.cloud.visionai.v1.ResolveOperatorInfoRequest) obj; if (!getParent().equals(other.getParent())) return false; if (!getQueriesList().equals(other.getQueriesList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int 
hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); if (getQueriesCount() > 0) { hash = (37 * hash) + QUERIES_FIELD_NUMBER; hash = (53 * hash) + getQueriesList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.visionai.v1.ResolveOperatorInfoRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.visionai.v1.ResolveOperatorInfoRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.visionai.v1.ResolveOperatorInfoRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.visionai.v1.ResolveOperatorInfoRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.visionai.v1.ResolveOperatorInfoRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.visionai.v1.ResolveOperatorInfoRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.visionai.v1.ResolveOperatorInfoRequest parseFrom( java.io.InputStream input) throws 
java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.visionai.v1.ResolveOperatorInfoRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.visionai.v1.ResolveOperatorInfoRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.visionai.v1.ResolveOperatorInfoRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.visionai.v1.ResolveOperatorInfoRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.visionai.v1.ResolveOperatorInfoRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.visionai.v1.ResolveOperatorInfoRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for querying operator info. * </pre> * * Protobuf type {@code google.cloud.visionai.v1.ResolveOperatorInfoRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.visionai.v1.ResolveOperatorInfoRequest) com.google.cloud.visionai.v1.ResolveOperatorInfoRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.visionai.v1.LvaServiceProto .internal_static_google_cloud_visionai_v1_ResolveOperatorInfoRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.visionai.v1.LvaServiceProto .internal_static_google_cloud_visionai_v1_ResolveOperatorInfoRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.visionai.v1.ResolveOperatorInfoRequest.class, com.google.cloud.visionai.v1.ResolveOperatorInfoRequest.Builder.class); } // Construct using com.google.cloud.visionai.v1.ResolveOperatorInfoRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; if (queriesBuilder_ == null) { queries_ = java.util.Collections.emptyList(); } else { queries_ = null; queriesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.visionai.v1.LvaServiceProto 
.internal_static_google_cloud_visionai_v1_ResolveOperatorInfoRequest_descriptor; } @java.lang.Override public com.google.cloud.visionai.v1.ResolveOperatorInfoRequest getDefaultInstanceForType() { return com.google.cloud.visionai.v1.ResolveOperatorInfoRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.visionai.v1.ResolveOperatorInfoRequest build() { com.google.cloud.visionai.v1.ResolveOperatorInfoRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.visionai.v1.ResolveOperatorInfoRequest buildPartial() { com.google.cloud.visionai.v1.ResolveOperatorInfoRequest result = new com.google.cloud.visionai.v1.ResolveOperatorInfoRequest(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.visionai.v1.ResolveOperatorInfoRequest result) { if (queriesBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0)) { queries_ = java.util.Collections.unmodifiableList(queries_); bitField0_ = (bitField0_ & ~0x00000002); } result.queries_ = queries_; } else { result.queries_ = queriesBuilder_.build(); } } private void buildPartial0(com.google.cloud.visionai.v1.ResolveOperatorInfoRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override 
public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.visionai.v1.ResolveOperatorInfoRequest) { return mergeFrom((com.google.cloud.visionai.v1.ResolveOperatorInfoRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.visionai.v1.ResolveOperatorInfoRequest other) { if (other == com.google.cloud.visionai.v1.ResolveOperatorInfoRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (queriesBuilder_ == null) { if (!other.queries_.isEmpty()) { if (queries_.isEmpty()) { queries_ = other.queries_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureQueriesIsMutable(); queries_.addAll(other.queries_); } onChanged(); } } else { if (!other.queries_.isEmpty()) { if (queriesBuilder_.isEmpty()) { queriesBuilder_.dispose(); queriesBuilder_ = null; queries_ = other.queries_; bitField0_ = (bitField0_ & ~0x00000002); queriesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getQueriesFieldBuilder() : null; } else { queriesBuilder_.addAllMessages(other.queries_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { com.google.cloud.visionai.v1.OperatorQuery m = input.readMessage( com.google.cloud.visionai.v1.OperatorQuery.parser(), extensionRegistry); if (queriesBuilder_ == null) { ensureQueriesIsMutable(); queries_.add(m); } else { queriesBuilder_.addMessage(m); } break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. Parent value for ResolveOperatorInfoRequest. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Parent value for ResolveOperatorInfoRequest. 
* </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. Parent value for ResolveOperatorInfoRequest. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Parent value for ResolveOperatorInfoRequest. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. Parent value for ResolveOperatorInfoRequest. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. 
*/ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.util.List<com.google.cloud.visionai.v1.OperatorQuery> queries_ = java.util.Collections.emptyList(); private void ensureQueriesIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { queries_ = new java.util.ArrayList<com.google.cloud.visionai.v1.OperatorQuery>(queries_); bitField0_ |= 0x00000002; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.visionai.v1.OperatorQuery, com.google.cloud.visionai.v1.OperatorQuery.Builder, com.google.cloud.visionai.v1.OperatorQueryOrBuilder> queriesBuilder_; /** * * * <pre> * Required. The operator queries. * </pre> * * <code> * repeated .google.cloud.visionai.v1.OperatorQuery queries = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public java.util.List<com.google.cloud.visionai.v1.OperatorQuery> getQueriesList() { if (queriesBuilder_ == null) { return java.util.Collections.unmodifiableList(queries_); } else { return queriesBuilder_.getMessageList(); } } /** * * * <pre> * Required. The operator queries. * </pre> * * <code> * repeated .google.cloud.visionai.v1.OperatorQuery queries = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public int getQueriesCount() { if (queriesBuilder_ == null) { return queries_.size(); } else { return queriesBuilder_.getCount(); } } /** * * * <pre> * Required. The operator queries. * </pre> * * <code> * repeated .google.cloud.visionai.v1.OperatorQuery queries = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.visionai.v1.OperatorQuery getQueries(int index) { if (queriesBuilder_ == null) { return queries_.get(index); } else { return queriesBuilder_.getMessage(index); } } /** * * * <pre> * Required. The operator queries. 
* </pre> * * <code> * repeated .google.cloud.visionai.v1.OperatorQuery queries = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setQueries(int index, com.google.cloud.visionai.v1.OperatorQuery value) { if (queriesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureQueriesIsMutable(); queries_.set(index, value); onChanged(); } else { queriesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * Required. The operator queries. * </pre> * * <code> * repeated .google.cloud.visionai.v1.OperatorQuery queries = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setQueries( int index, com.google.cloud.visionai.v1.OperatorQuery.Builder builderForValue) { if (queriesBuilder_ == null) { ensureQueriesIsMutable(); queries_.set(index, builderForValue.build()); onChanged(); } else { queriesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Required. The operator queries. * </pre> * * <code> * repeated .google.cloud.visionai.v1.OperatorQuery queries = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder addQueries(com.google.cloud.visionai.v1.OperatorQuery value) { if (queriesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureQueriesIsMutable(); queries_.add(value); onChanged(); } else { queriesBuilder_.addMessage(value); } return this; } /** * * * <pre> * Required. The operator queries. * </pre> * * <code> * repeated .google.cloud.visionai.v1.OperatorQuery queries = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder addQueries(int index, com.google.cloud.visionai.v1.OperatorQuery value) { if (queriesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureQueriesIsMutable(); queries_.add(index, value); onChanged(); } else { queriesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * Required. The operator queries. 
* </pre> * * <code> * repeated .google.cloud.visionai.v1.OperatorQuery queries = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder addQueries(com.google.cloud.visionai.v1.OperatorQuery.Builder builderForValue) { if (queriesBuilder_ == null) { ensureQueriesIsMutable(); queries_.add(builderForValue.build()); onChanged(); } else { queriesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * Required. The operator queries. * </pre> * * <code> * repeated .google.cloud.visionai.v1.OperatorQuery queries = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder addQueries( int index, com.google.cloud.visionai.v1.OperatorQuery.Builder builderForValue) { if (queriesBuilder_ == null) { ensureQueriesIsMutable(); queries_.add(index, builderForValue.build()); onChanged(); } else { queriesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Required. The operator queries. * </pre> * * <code> * repeated .google.cloud.visionai.v1.OperatorQuery queries = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder addAllQueries( java.lang.Iterable<? extends com.google.cloud.visionai.v1.OperatorQuery> values) { if (queriesBuilder_ == null) { ensureQueriesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, queries_); onChanged(); } else { queriesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * Required. The operator queries. * </pre> * * <code> * repeated .google.cloud.visionai.v1.OperatorQuery queries = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearQueries() { if (queriesBuilder_ == null) { queries_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { queriesBuilder_.clear(); } return this; } /** * * * <pre> * Required. The operator queries. 
* </pre> * * <code> * repeated .google.cloud.visionai.v1.OperatorQuery queries = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder removeQueries(int index) { if (queriesBuilder_ == null) { ensureQueriesIsMutable(); queries_.remove(index); onChanged(); } else { queriesBuilder_.remove(index); } return this; } /** * * * <pre> * Required. The operator queries. * </pre> * * <code> * repeated .google.cloud.visionai.v1.OperatorQuery queries = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.visionai.v1.OperatorQuery.Builder getQueriesBuilder(int index) { return getQueriesFieldBuilder().getBuilder(index); } /** * * * <pre> * Required. The operator queries. * </pre> * * <code> * repeated .google.cloud.visionai.v1.OperatorQuery queries = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.visionai.v1.OperatorQueryOrBuilder getQueriesOrBuilder(int index) { if (queriesBuilder_ == null) { return queries_.get(index); } else { return queriesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * Required. The operator queries. * </pre> * * <code> * repeated .google.cloud.visionai.v1.OperatorQuery queries = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public java.util.List<? extends com.google.cloud.visionai.v1.OperatorQueryOrBuilder> getQueriesOrBuilderList() { if (queriesBuilder_ != null) { return queriesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(queries_); } } /** * * * <pre> * Required. The operator queries. * </pre> * * <code> * repeated .google.cloud.visionai.v1.OperatorQuery queries = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.visionai.v1.OperatorQuery.Builder addQueriesBuilder() { return getQueriesFieldBuilder() .addBuilder(com.google.cloud.visionai.v1.OperatorQuery.getDefaultInstance()); } /** * * * <pre> * Required. The operator queries. 
* </pre> * * <code> * repeated .google.cloud.visionai.v1.OperatorQuery queries = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.visionai.v1.OperatorQuery.Builder addQueriesBuilder(int index) { return getQueriesFieldBuilder() .addBuilder(index, com.google.cloud.visionai.v1.OperatorQuery.getDefaultInstance()); } /** * * * <pre> * Required. The operator queries. * </pre> * * <code> * repeated .google.cloud.visionai.v1.OperatorQuery queries = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public java.util.List<com.google.cloud.visionai.v1.OperatorQuery.Builder> getQueriesBuilderList() { return getQueriesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.visionai.v1.OperatorQuery, com.google.cloud.visionai.v1.OperatorQuery.Builder, com.google.cloud.visionai.v1.OperatorQueryOrBuilder> getQueriesFieldBuilder() { if (queriesBuilder_ == null) { queriesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.visionai.v1.OperatorQuery, com.google.cloud.visionai.v1.OperatorQuery.Builder, com.google.cloud.visionai.v1.OperatorQueryOrBuilder>( queries_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean()); queries_ = null; } return queriesBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.visionai.v1.ResolveOperatorInfoRequest) } // @@protoc_insertion_point(class_scope:google.cloud.visionai.v1.ResolveOperatorInfoRequest) private static final com.google.cloud.visionai.v1.ResolveOperatorInfoRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.visionai.v1.ResolveOperatorInfoRequest(); 
} public static com.google.cloud.visionai.v1.ResolveOperatorInfoRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ResolveOperatorInfoRequest> PARSER = new com.google.protobuf.AbstractParser<ResolveOperatorInfoRequest>() { @java.lang.Override public ResolveOperatorInfoRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ResolveOperatorInfoRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ResolveOperatorInfoRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.visionai.v1.ResolveOperatorInfoRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,141
java-dataform/proto-google-cloud-dataform-v1/src/main/java/com/google/cloud/dataform/v1/QueryDirectoryContentsRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dataform/v1/dataform.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.dataform.v1; /** * * * <pre> * `QueryDirectoryContents` request message. * </pre> * * Protobuf type {@code google.cloud.dataform.v1.QueryDirectoryContentsRequest} */ public final class QueryDirectoryContentsRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dataform.v1.QueryDirectoryContentsRequest) QueryDirectoryContentsRequestOrBuilder { private static final long serialVersionUID = 0L; // Use QueryDirectoryContentsRequest.newBuilder() to construct. 
private QueryDirectoryContentsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private QueryDirectoryContentsRequest() { workspace_ = ""; path_ = ""; pageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new QueryDirectoryContentsRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dataform.v1.DataformProto .internal_static_google_cloud_dataform_v1_QueryDirectoryContentsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dataform.v1.DataformProto .internal_static_google_cloud_dataform_v1_QueryDirectoryContentsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dataform.v1.QueryDirectoryContentsRequest.class, com.google.cloud.dataform.v1.QueryDirectoryContentsRequest.Builder.class); } public static final int WORKSPACE_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object workspace_ = ""; /** * * * <pre> * Required. The workspace's name. * </pre> * * <code> * string workspace = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The workspace. */ @java.lang.Override public java.lang.String getWorkspace() { java.lang.Object ref = workspace_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); workspace_ = s; return s; } } /** * * * <pre> * Required. The workspace's name. * </pre> * * <code> * string workspace = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for workspace. 
*/ @java.lang.Override public com.google.protobuf.ByteString getWorkspaceBytes() { java.lang.Object ref = workspace_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); workspace_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PATH_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object path_ = ""; /** * * * <pre> * Optional. The directory's full path including directory name, relative to * the workspace root. If left unset, the workspace root is used. * </pre> * * <code>string path = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The path. */ @java.lang.Override public java.lang.String getPath() { java.lang.Object ref = path_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); path_ = s; return s; } } /** * * * <pre> * Optional. The directory's full path including directory name, relative to * the workspace root. If left unset, the workspace root is used. * </pre> * * <code>string path = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for path. */ @java.lang.Override public com.google.protobuf.ByteString getPathBytes() { java.lang.Object ref = path_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); path_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 3; private int pageSize_ = 0; /** * * * <pre> * Optional. Maximum number of paths to return. The server may return fewer * items than requested. If unspecified, the server will pick an appropriate * default. 
* </pre> * * <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. Page token received from a previous `QueryDirectoryContents` * call. Provide this to retrieve the subsequent page. * * When paginating, all other parameters provided to * `QueryDirectoryContents`, with the exception of `page_size`, must match the * call that provided the page token. * </pre> * * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * Optional. Page token received from a previous `QueryDirectoryContents` * call. Provide this to retrieve the subsequent page. * * When paginating, all other parameters provided to * `QueryDirectoryContents`, with the exception of `page_size`, must match the * call that provided the page token. * </pre> * * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(workspace_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, workspace_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(path_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, path_); } if (pageSize_ != 0) { output.writeInt32(3, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, pageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(workspace_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, workspace_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(path_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, path_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, pageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = 
size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dataform.v1.QueryDirectoryContentsRequest)) { return super.equals(obj); } com.google.cloud.dataform.v1.QueryDirectoryContentsRequest other = (com.google.cloud.dataform.v1.QueryDirectoryContentsRequest) obj; if (!getWorkspace().equals(other.getWorkspace())) return false; if (!getPath().equals(other.getPath())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + WORKSPACE_FIELD_NUMBER; hash = (53 * hash) + getWorkspace().hashCode(); hash = (37 * hash) + PATH_FIELD_NUMBER; hash = (53 * hash) + getPath().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dataform.v1.QueryDirectoryContentsRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataform.v1.QueryDirectoryContentsRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataform.v1.QueryDirectoryContentsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data); } public static com.google.cloud.dataform.v1.QueryDirectoryContentsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataform.v1.QueryDirectoryContentsRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataform.v1.QueryDirectoryContentsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataform.v1.QueryDirectoryContentsRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dataform.v1.QueryDirectoryContentsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dataform.v1.QueryDirectoryContentsRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dataform.v1.QueryDirectoryContentsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dataform.v1.QueryDirectoryContentsRequest parseFrom( com.google.protobuf.CodedInputStream 
input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dataform.v1.QueryDirectoryContentsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.dataform.v1.QueryDirectoryContentsRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * `QueryDirectoryContents` request message. 
* </pre> * * Protobuf type {@code google.cloud.dataform.v1.QueryDirectoryContentsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dataform.v1.QueryDirectoryContentsRequest) com.google.cloud.dataform.v1.QueryDirectoryContentsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dataform.v1.DataformProto .internal_static_google_cloud_dataform_v1_QueryDirectoryContentsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dataform.v1.DataformProto .internal_static_google_cloud_dataform_v1_QueryDirectoryContentsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dataform.v1.QueryDirectoryContentsRequest.class, com.google.cloud.dataform.v1.QueryDirectoryContentsRequest.Builder.class); } // Construct using com.google.cloud.dataform.v1.QueryDirectoryContentsRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; workspace_ = ""; path_ = ""; pageSize_ = 0; pageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dataform.v1.DataformProto .internal_static_google_cloud_dataform_v1_QueryDirectoryContentsRequest_descriptor; } @java.lang.Override public com.google.cloud.dataform.v1.QueryDirectoryContentsRequest getDefaultInstanceForType() { return com.google.cloud.dataform.v1.QueryDirectoryContentsRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.dataform.v1.QueryDirectoryContentsRequest build() { com.google.cloud.dataform.v1.QueryDirectoryContentsRequest result = 
buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dataform.v1.QueryDirectoryContentsRequest buildPartial() { com.google.cloud.dataform.v1.QueryDirectoryContentsRequest result = new com.google.cloud.dataform.v1.QueryDirectoryContentsRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.dataform.v1.QueryDirectoryContentsRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.workspace_ = workspace_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.path_ = path_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageSize_ = pageSize_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.pageToken_ = pageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dataform.v1.QueryDirectoryContentsRequest) { return mergeFrom((com.google.cloud.dataform.v1.QueryDirectoryContentsRequest) 
other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.dataform.v1.QueryDirectoryContentsRequest other) { if (other == com.google.cloud.dataform.v1.QueryDirectoryContentsRequest.getDefaultInstance()) return this; if (!other.getWorkspace().isEmpty()) { workspace_ = other.workspace_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getPath().isEmpty()) { path_ = other.path_; bitField0_ |= 0x00000002; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { workspace_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { path_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 24: { pageSize_ = input.readInt32(); bitField0_ |= 0x00000004; break; } // case 24 case 34: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object workspace_ = ""; /** * * * <pre> * Required. The workspace's name. 
* </pre> * * <code> * string workspace = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The workspace. */ public java.lang.String getWorkspace() { java.lang.Object ref = workspace_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); workspace_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The workspace's name. * </pre> * * <code> * string workspace = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for workspace. */ public com.google.protobuf.ByteString getWorkspaceBytes() { java.lang.Object ref = workspace_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); workspace_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The workspace's name. * </pre> * * <code> * string workspace = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The workspace to set. * @return This builder for chaining. */ public Builder setWorkspace(java.lang.String value) { if (value == null) { throw new NullPointerException(); } workspace_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The workspace's name. * </pre> * * <code> * string workspace = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearWorkspace() { workspace_ = getDefaultInstance().getWorkspace(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The workspace's name. 
* </pre> * * <code> * string workspace = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for workspace to set. * @return This builder for chaining. */ public Builder setWorkspaceBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); workspace_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object path_ = ""; /** * * * <pre> * Optional. The directory's full path including directory name, relative to * the workspace root. If left unset, the workspace root is used. * </pre> * * <code>string path = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The path. */ public java.lang.String getPath() { java.lang.Object ref = path_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); path_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. The directory's full path including directory name, relative to * the workspace root. If left unset, the workspace root is used. * </pre> * * <code>string path = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for path. */ public com.google.protobuf.ByteString getPathBytes() { java.lang.Object ref = path_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); path_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. The directory's full path including directory name, relative to * the workspace root. If left unset, the workspace root is used. * </pre> * * <code>string path = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The path to set. * @return This builder for chaining. 
*/ public Builder setPath(java.lang.String value) { if (value == null) { throw new NullPointerException(); } path_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. The directory's full path including directory name, relative to * the workspace root. If left unset, the workspace root is used. * </pre> * * <code>string path = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearPath() { path_ = getDefaultInstance().getPath(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Optional. The directory's full path including directory name, relative to * the workspace root. If left unset, the workspace root is used. * </pre> * * <code>string path = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for path to set. * @return This builder for chaining. */ public Builder setPathBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); path_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private int pageSize_; /** * * * <pre> * Optional. Maximum number of paths to return. The server may return fewer * items than requested. If unspecified, the server will pick an appropriate * default. * </pre> * * <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * Optional. Maximum number of paths to return. The server may return fewer * items than requested. If unspecified, the server will pick an appropriate * default. * </pre> * * <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageSize to set. * @return This builder for chaining. 
*/ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. Maximum number of paths to return. The server may return fewer * items than requested. If unspecified, the server will pick an appropriate * default. * </pre> * * <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000004); pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. Page token received from a previous `QueryDirectoryContents` * call. Provide this to retrieve the subsequent page. * * When paginating, all other parameters provided to * `QueryDirectoryContents`, with the exception of `page_size`, must match the * call that provided the page token. * </pre> * * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. Page token received from a previous `QueryDirectoryContents` * call. Provide this to retrieve the subsequent page. * * When paginating, all other parameters provided to * `QueryDirectoryContents`, with the exception of `page_size`, must match the * call that provided the page token. * </pre> * * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. 
*/ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. Page token received from a previous `QueryDirectoryContents` * call. Provide this to retrieve the subsequent page. * * When paginating, all other parameters provided to * `QueryDirectoryContents`, with the exception of `page_size`, must match the * call that provided the page token. * </pre> * * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageToken to set. * @return This builder for chaining. */ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Optional. Page token received from a previous `QueryDirectoryContents` * call. Provide this to retrieve the subsequent page. * * When paginating, all other parameters provided to * `QueryDirectoryContents`, with the exception of `page_size`, must match the * call that provided the page token. * </pre> * * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * Optional. Page token received from a previous `QueryDirectoryContents` * call. Provide this to retrieve the subsequent page. * * When paginating, all other parameters provided to * `QueryDirectoryContents`, with the exception of `page_size`, must match the * call that provided the page token. 
* </pre> * * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. */ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dataform.v1.QueryDirectoryContentsRequest) } // @@protoc_insertion_point(class_scope:google.cloud.dataform.v1.QueryDirectoryContentsRequest) private static final com.google.cloud.dataform.v1.QueryDirectoryContentsRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dataform.v1.QueryDirectoryContentsRequest(); } public static com.google.cloud.dataform.v1.QueryDirectoryContentsRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<QueryDirectoryContentsRequest> PARSER = new com.google.protobuf.AbstractParser<QueryDirectoryContentsRequest>() { @java.lang.Override public QueryDirectoryContentsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw 
e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<QueryDirectoryContentsRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<QueryDirectoryContentsRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dataform.v1.QueryDirectoryContentsRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,100
java-discoveryengine/proto-google-cloud-discoveryengine-v1beta/src/main/java/com/google/cloud/discoveryengine/v1beta/PurgeUserEventsMetadata.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/discoveryengine/v1beta/purge_config.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.discoveryengine.v1beta; /** * * * <pre> * Metadata related to the progress of the PurgeUserEvents operation. * This will be returned by the google.longrunning.Operation.metadata field. * </pre> * * Protobuf type {@code google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata} */ public final class PurgeUserEventsMetadata extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata) PurgeUserEventsMetadataOrBuilder { private static final long serialVersionUID = 0L; // Use PurgeUserEventsMetadata.newBuilder() to construct. 
private PurgeUserEventsMetadata(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private PurgeUserEventsMetadata() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new PurgeUserEventsMetadata(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.discoveryengine.v1beta.PurgeConfigProto .internal_static_google_cloud_discoveryengine_v1beta_PurgeUserEventsMetadata_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.discoveryengine.v1beta.PurgeConfigProto .internal_static_google_cloud_discoveryengine_v1beta_PurgeUserEventsMetadata_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata.class, com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata.Builder.class); } private int bitField0_; public static final int CREATE_TIME_FIELD_NUMBER = 1; private com.google.protobuf.Timestamp createTime_; /** * * * <pre> * Operation create time. * </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> * * @return Whether the createTime field is set. */ @java.lang.Override public boolean hasCreateTime() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Operation create time. * </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> * * @return The createTime. */ @java.lang.Override public com.google.protobuf.Timestamp getCreateTime() { return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_; } /** * * * <pre> * Operation create time. * </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> */ @java.lang.Override public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() { return createTime_ == null ? 
com.google.protobuf.Timestamp.getDefaultInstance() : createTime_; } public static final int UPDATE_TIME_FIELD_NUMBER = 2; private com.google.protobuf.Timestamp updateTime_; /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> * * @return Whether the updateTime field is set. */ @java.lang.Override public boolean hasUpdateTime() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> * * @return The updateTime. */ @java.lang.Override public com.google.protobuf.Timestamp getUpdateTime() { return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_; } /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> */ @java.lang.Override public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() { return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_; } public static final int SUCCESS_COUNT_FIELD_NUMBER = 3; private long successCount_ = 0L; /** * * * <pre> * Count of entries that were deleted successfully. * </pre> * * <code>int64 success_count = 3;</code> * * @return The successCount. */ @java.lang.Override public long getSuccessCount() { return successCount_; } public static final int FAILURE_COUNT_FIELD_NUMBER = 4; private long failureCount_ = 0L; /** * * * <pre> * Count of entries that encountered errors while processing. * </pre> * * <code>int64 failure_count = 4;</code> * * @return The failureCount. 
*/ @java.lang.Override public long getFailureCount() { return failureCount_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getCreateTime()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateTime()); } if (successCount_ != 0L) { output.writeInt64(3, successCount_); } if (failureCount_ != 0L) { output.writeInt64(4, failureCount_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getCreateTime()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateTime()); } if (successCount_ != 0L) { size += com.google.protobuf.CodedOutputStream.computeInt64Size(3, successCount_); } if (failureCount_ != 0L) { size += com.google.protobuf.CodedOutputStream.computeInt64Size(4, failureCount_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata)) { return super.equals(obj); } com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata other = (com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata) obj; if (hasCreateTime() != other.hasCreateTime()) return false; if (hasCreateTime()) { if (!getCreateTime().equals(other.getCreateTime())) return false; } if 
(hasUpdateTime() != other.hasUpdateTime()) return false; if (hasUpdateTime()) { if (!getUpdateTime().equals(other.getUpdateTime())) return false; } if (getSuccessCount() != other.getSuccessCount()) return false; if (getFailureCount() != other.getFailureCount()) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasCreateTime()) { hash = (37 * hash) + CREATE_TIME_FIELD_NUMBER; hash = (53 * hash) + getCreateTime().hashCode(); } if (hasUpdateTime()) { hash = (37 * hash) + UPDATE_TIME_FIELD_NUMBER; hash = (53 * hash) + getUpdateTime().hashCode(); } hash = (37 * hash) + SUCCESS_COUNT_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getSuccessCount()); hash = (37 * hash) + FAILURE_COUNT_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getFailureCount()); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static 
com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Metadata related to the progress of the PurgeUserEvents operation. * This will be returned by the google.longrunning.Operation.metadata field. 
* </pre> * * Protobuf type {@code google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata) com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadataOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.discoveryengine.v1beta.PurgeConfigProto .internal_static_google_cloud_discoveryengine_v1beta_PurgeUserEventsMetadata_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.discoveryengine.v1beta.PurgeConfigProto .internal_static_google_cloud_discoveryengine_v1beta_PurgeUserEventsMetadata_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata.class, com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata.Builder.class); } // Construct using com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getCreateTimeFieldBuilder(); getUpdateTimeFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; createTime_ = null; if (createTimeBuilder_ != null) { createTimeBuilder_.dispose(); createTimeBuilder_ = null; } updateTime_ = null; if (updateTimeBuilder_ != null) { updateTimeBuilder_.dispose(); updateTimeBuilder_ = null; } successCount_ = 0L; failureCount_ = 0L; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor 
getDescriptorForType() { return com.google.cloud.discoveryengine.v1beta.PurgeConfigProto .internal_static_google_cloud_discoveryengine_v1beta_PurgeUserEventsMetadata_descriptor; } @java.lang.Override public com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata getDefaultInstanceForType() { return com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata.getDefaultInstance(); } @java.lang.Override public com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata build() { com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata buildPartial() { com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata result = new com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.createTime_ = createTimeBuilder_ == null ? createTime_ : createTimeBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateTime_ = updateTimeBuilder_ == null ? 
updateTime_ : updateTimeBuilder_.build(); to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.successCount_ = successCount_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.failureCount_ = failureCount_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata) { return mergeFrom((com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata other) { if (other == com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata.getDefaultInstance()) return this; if (other.hasCreateTime()) { mergeCreateTime(other.getCreateTime()); } if (other.hasUpdateTime()) { mergeUpdateTime(other.getUpdateTime()); } if (other.getSuccessCount() != 0L) { setSuccessCount(other.getSuccessCount()); } if (other.getFailureCount() != 0L) { setFailureCount(other.getFailureCount()); } 
this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getCreateTimeFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateTimeFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 24: { successCount_ = input.readInt64(); bitField0_ |= 0x00000004; break; } // case 24 case 32: { failureCount_ = input.readInt64(); bitField0_ |= 0x00000008; break; } // case 32 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.protobuf.Timestamp createTime_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> createTimeBuilder_; /** * * * <pre> * Operation create time. * </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> * * @return Whether the createTime field is set. */ public boolean hasCreateTime() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Operation create time. * </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> * * @return The createTime. 
*/ public com.google.protobuf.Timestamp getCreateTime() { if (createTimeBuilder_ == null) { return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_; } else { return createTimeBuilder_.getMessage(); } } /** * * * <pre> * Operation create time. * </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> */ public Builder setCreateTime(com.google.protobuf.Timestamp value) { if (createTimeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } createTime_ = value; } else { createTimeBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Operation create time. * </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> */ public Builder setCreateTime(com.google.protobuf.Timestamp.Builder builderForValue) { if (createTimeBuilder_ == null) { createTime_ = builderForValue.build(); } else { createTimeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Operation create time. * </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> */ public Builder mergeCreateTime(com.google.protobuf.Timestamp value) { if (createTimeBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && createTime_ != null && createTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { getCreateTimeBuilder().mergeFrom(value); } else { createTime_ = value; } } else { createTimeBuilder_.mergeFrom(value); } if (createTime_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Operation create time. * </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> */ public Builder clearCreateTime() { bitField0_ = (bitField0_ & ~0x00000001); createTime_ = null; if (createTimeBuilder_ != null) { createTimeBuilder_.dispose(); createTimeBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Operation create time. 
* </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> */ public com.google.protobuf.Timestamp.Builder getCreateTimeBuilder() { bitField0_ |= 0x00000001; onChanged(); return getCreateTimeFieldBuilder().getBuilder(); } /** * * * <pre> * Operation create time. * </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> */ public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() { if (createTimeBuilder_ != null) { return createTimeBuilder_.getMessageOrBuilder(); } else { return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_; } } /** * * * <pre> * Operation create time. * </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> getCreateTimeFieldBuilder() { if (createTimeBuilder_ == null) { createTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( getCreateTime(), getParentForChildren(), isClean()); createTime_ = null; } return createTimeBuilder_; } private com.google.protobuf.Timestamp updateTime_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> updateTimeBuilder_; /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> * * @return Whether the updateTime field is set. */ public boolean hasUpdateTime() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> * * @return The updateTime. 
*/ public com.google.protobuf.Timestamp getUpdateTime() { if (updateTimeBuilder_ == null) { return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_; } else { return updateTimeBuilder_.getMessage(); } } /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> */ public Builder setUpdateTime(com.google.protobuf.Timestamp value) { if (updateTimeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateTime_ = value; } else { updateTimeBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> */ public Builder setUpdateTime(com.google.protobuf.Timestamp.Builder builderForValue) { if (updateTimeBuilder_ == null) { updateTime_ = builderForValue.build(); } else { updateTimeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> */ public Builder mergeUpdateTime(com.google.protobuf.Timestamp value) { if (updateTimeBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateTime_ != null && updateTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { getUpdateTimeBuilder().mergeFrom(value); } else { updateTime_ = value; } } else { updateTimeBuilder_.mergeFrom(value); } if (updateTime_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. 
* </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> */ public Builder clearUpdateTime() { bitField0_ = (bitField0_ & ~0x00000002); updateTime_ = null; if (updateTimeBuilder_ != null) { updateTimeBuilder_.dispose(); updateTimeBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> */ public com.google.protobuf.Timestamp.Builder getUpdateTimeBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateTimeFieldBuilder().getBuilder(); } /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> */ public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() { if (updateTimeBuilder_ != null) { return updateTimeBuilder_.getMessageOrBuilder(); } else { return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_; } } /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> getUpdateTimeFieldBuilder() { if (updateTimeBuilder_ == null) { updateTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( getUpdateTime(), getParentForChildren(), isClean()); updateTime_ = null; } return updateTimeBuilder_; } private long successCount_; /** * * * <pre> * Count of entries that were deleted successfully. * </pre> * * <code>int64 success_count = 3;</code> * * @return The successCount. 
*/ @java.lang.Override public long getSuccessCount() { return successCount_; } /** * * * <pre> * Count of entries that were deleted successfully. * </pre> * * <code>int64 success_count = 3;</code> * * @param value The successCount to set. * @return This builder for chaining. */ public Builder setSuccessCount(long value) { successCount_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Count of entries that were deleted successfully. * </pre> * * <code>int64 success_count = 3;</code> * * @return This builder for chaining. */ public Builder clearSuccessCount() { bitField0_ = (bitField0_ & ~0x00000004); successCount_ = 0L; onChanged(); return this; } private long failureCount_; /** * * * <pre> * Count of entries that encountered errors while processing. * </pre> * * <code>int64 failure_count = 4;</code> * * @return The failureCount. */ @java.lang.Override public long getFailureCount() { return failureCount_; } /** * * * <pre> * Count of entries that encountered errors while processing. * </pre> * * <code>int64 failure_count = 4;</code> * * @param value The failureCount to set. * @return This builder for chaining. */ public Builder setFailureCount(long value) { failureCount_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Count of entries that encountered errors while processing. * </pre> * * <code>int64 failure_count = 4;</code> * * @return This builder for chaining. 
*/ public Builder clearFailureCount() { bitField0_ = (bitField0_ & ~0x00000008); failureCount_ = 0L; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata) } // @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata) private static final com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata(); } public static com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<PurgeUserEventsMetadata> PARSER = new com.google.protobuf.AbstractParser<PurgeUserEventsMetadata>() { @java.lang.Override public PurgeUserEventsMetadata parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<PurgeUserEventsMetadata> parser() 
{ return PARSER; } @java.lang.Override public com.google.protobuf.Parser<PurgeUserEventsMetadata> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.discoveryengine.v1beta.PurgeUserEventsMetadata getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
// ---------------------------------------------------------------------------
// NOTE(review): stray dataset metadata (repo id "googleapis/google-cloud-java",
// size "37,227", and a file path) was fused into the source at this point.
// It marks the boundary between two concatenated generated files; the content
// that follows belongs to:
// java-discoveryengine/proto-google-cloud-discoveryengine-v1alpha/src/main/java/
//   com/google/cloud/discoveryengine/v1alpha/ListSampleQueriesRequest.java
// Both files are protoc-generated ("DO NOT EDIT"); regenerate from the .proto
// sources rather than hand-editing.
// ---------------------------------------------------------------------------
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/discoveryengine/v1alpha/sample_query_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.discoveryengine.v1alpha; /** * * * <pre> * Request message for * [SampleQueryService.ListSampleQueries][google.cloud.discoveryengine.v1alpha.SampleQueryService.ListSampleQueries] * method. * </pre> * * Protobuf type {@code google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest} */ public final class ListSampleQueriesRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest) ListSampleQueriesRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListSampleQueriesRequest.newBuilder() to construct. 
private ListSampleQueriesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListSampleQueriesRequest() { parent_ = ""; pageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListSampleQueriesRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.discoveryengine.v1alpha.SampleQueryServiceProto .internal_static_google_cloud_discoveryengine_v1alpha_ListSampleQueriesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.discoveryengine.v1alpha.SampleQueryServiceProto .internal_static_google_cloud_discoveryengine_v1alpha_ListSampleQueriesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest.class, com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The parent sample query set resource name, such as * `projects/{project}/locations/{location}/sampleQuerySets/{sampleQuerySet}`. * * If the caller does not have permission to list * [SampleQuery][google.cloud.discoveryengine.v1alpha.SampleQuery]s under this * sample query set, regardless of whether or not this sample query set * exists, a `PERMISSION_DENIED` error is returned. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. 
*/ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The parent sample query set resource name, such as * `projects/{project}/locations/{location}/sampleQuerySets/{sampleQuerySet}`. * * If the caller does not have permission to list * [SampleQuery][google.cloud.discoveryengine.v1alpha.SampleQuery]s under this * sample query set, regardless of whether or not this sample query set * exists, a `PERMISSION_DENIED` error is returned. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 2; private int pageSize_ = 0; /** * * * <pre> * Maximum number of * [SampleQuery][google.cloud.discoveryengine.v1alpha.SampleQuery]s to return. * If unspecified, defaults to 100. The maximum allowed value is 1000. Values * above 1000 will be coerced to 1000. * * If this field is negative, an `INVALID_ARGUMENT` error is returned. * </pre> * * <code>int32 page_size = 2;</code> * * @return The pageSize. 
*/ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * A page token * [ListSampleQueriesResponse.next_page_token][google.cloud.discoveryengine.v1alpha.ListSampleQueriesResponse.next_page_token], * received from a previous * [SampleQueryService.ListSampleQueries][google.cloud.discoveryengine.v1alpha.SampleQueryService.ListSampleQueries] * call. Provide this to retrieve the subsequent page. * * When paginating, all other parameters provided to * [SampleQueryService.ListSampleQueries][google.cloud.discoveryengine.v1alpha.SampleQueryService.ListSampleQueries] * must match the call that provided the page token. Otherwise, an * `INVALID_ARGUMENT` error is returned. * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * A page token * [ListSampleQueriesResponse.next_page_token][google.cloud.discoveryengine.v1alpha.ListSampleQueriesResponse.next_page_token], * received from a previous * [SampleQueryService.ListSampleQueries][google.cloud.discoveryengine.v1alpha.SampleQueryService.ListSampleQueries] * call. Provide this to retrieve the subsequent page. * * When paginating, all other parameters provided to * [SampleQueryService.ListSampleQueries][google.cloud.discoveryengine.v1alpha.SampleQueryService.ListSampleQueries] * must match the call that provided the page token. Otherwise, an * `INVALID_ARGUMENT` error is returned. * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (pageSize_ != 0) { output.writeInt32(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest)) { return super.equals(obj); } 
com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest other = (com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest) obj; if (!getParent().equals(other.getParent())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest parseFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for * [SampleQueryService.ListSampleQueries][google.cloud.discoveryengine.v1alpha.SampleQueryService.ListSampleQueries] * method. * </pre> * * Protobuf type {@code google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest) com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.discoveryengine.v1alpha.SampleQueryServiceProto .internal_static_google_cloud_discoveryengine_v1alpha_ListSampleQueriesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.discoveryengine.v1alpha.SampleQueryServiceProto .internal_static_google_cloud_discoveryengine_v1alpha_ListSampleQueriesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( 
com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest.class, com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest.Builder.class); } // Construct using // com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; pageSize_ = 0; pageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.discoveryengine.v1alpha.SampleQueryServiceProto .internal_static_google_cloud_discoveryengine_v1alpha_ListSampleQueriesRequest_descriptor; } @java.lang.Override public com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest getDefaultInstanceForType() { return com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest build() { com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest buildPartial() { com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest result = new com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.pageSize_ = pageSize_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageToken_ = pageToken_; } } 
    // ------------------------------------------------------------------
    // Generic field-manipulation overrides. Each simply delegates to the
    // GeneratedMessageV3.Builder base class; they are restated here so the
    // generated builder exposes a covariant Builder return type.
    // ------------------------------------------------------------------

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest) {
        return mergeFrom((com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Field-by-field merge: only fields set on `other` overwrite this builder's
    // state (proto3 "last one wins" semantics for scalar fields).
    public Builder mergeFrom(
        com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest other) {
      if (other
          == com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      // proto3 message with no required (proto2-style) fields: always initialized.
      return true;
    }

    // Wire-format parser: reads tag/value pairs until EOF (tag 0) or an
    // end-group tag, storing recognized fields and preserving unknown ones.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 16:
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 26:
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Tracks which fields have been explicitly set on this builder
    // (bit 0 = parent, bit 1 = page_size, bit 2 = page_token).
    private int bitField0_;

    // Stored as either String or ByteString; lazily converted and cached on access.
    private java.lang.Object parent_ = "";

    /**
     * Required. The parent sample query set resource name, such as
     * {@code projects/{project}/locations/{location}/sampleQuerySets/{sampleQuerySet}}.
     * Callers without list permission on this sample query set receive
     * {@code PERMISSION_DENIED}, whether or not the set exists.
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s; // cache the decoded form
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * Required. The parent resource name as bytes (see {@link #getParent()}).
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b; // cache the encoded form
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * Sets the required parent sample query set resource name.
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     * Clears parent back to its default (empty) value.
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     * Sets parent from a UTF-8 encoded ByteString.
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private int pageSize_;

    /**
     * Maximum number of SampleQuery resources to return. Server-side: unset
     * defaults to 100, values above 1000 are coerced to 1000, and negative
     * values yield an {@code INVALID_ARGUMENT} error.
     *
     * @return The pageSize.
     */
    @java.lang.Override
    public int getPageSize() {
      return pageSize_;
    }

    /**
     * Sets page_size (see {@link #getPageSize()} for server-side semantics).
     *
     * @param value The pageSize to set.
     * @return This builder for chaining.
     */
    public Builder setPageSize(int value) {

      pageSize_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     * Clears page_size back to 0 (the server default then applies).
     *
     * @return This builder for chaining.
     */
    public Builder clearPageSize() {
      bitField0_ = (bitField0_ & ~0x00000002);
      pageSize_ = 0;
      onChanged();
      return this;
    }

    // Stored as either String or ByteString; lazily converted and cached on access.
    private java.lang.Object pageToken_ = "";

    /**
     * A page token ({@code ListSampleQueriesResponse.next_page_token}) from a
     * previous ListSampleQueries call. All other request parameters must match
     * the call that produced the token, otherwise {@code INVALID_ARGUMENT}.
     *
     * @return The pageToken.
     */
    public java.lang.String getPageToken() {
      java.lang.Object ref = pageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pageToken_ = s; // cache the decoded form
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * The page token as bytes (see {@link #getPageToken()}).
     *
     * @return The bytes for pageToken.
     */
    public com.google.protobuf.ByteString getPageTokenBytes() {
      java.lang.Object ref = pageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        pageToken_ = b; // cache the encoded form
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * Sets page_token (see {@link #getPageToken()}).
     *
     * @param value The pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     * Clears page_token back to its default (empty) value.
     *
     * @return This builder for chaining.
     */
    public Builder clearPageToken() {
      pageToken_ = getDefaultInstance().getPageToken();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }

    /**
     * Sets page_token from a UTF-8 encoded ByteString.
     *
     * @param value The bytes for pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest)
  // Shared canonical default instance; all empty messages of this type alias it.
  private static final com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest();
  }

  public static com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser that delegates to Builder.mergeFrom and attaches the partially
  // parsed message to any InvalidProtocolBufferException it rethrows.
  private static final com.google.protobuf.Parser<ListSampleQueriesRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListSampleQueriesRequest>() {
        @java.lang.Override
        public ListSampleQueriesRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListSampleQueriesRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListSampleQueriesRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.discoveryengine.v1alpha.ListSampleQueriesRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
googleapis/google-cloud-java
37,471
java-private-catalog/grpc-google-cloud-private-catalog-v1beta1/src/main/java/com/google/cloud/privatecatalog/v1beta1/PrivateCatalogGrpc.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.privatecatalog.v1beta1; import static io.grpc.MethodDescriptor.generateFullMethodName; /** * * * <pre> * `PrivateCatalog` allows catalog consumers to retrieve `Catalog`, `Product` * and `Version` resources under a target resource context. * `Catalog` is computed based on the [Association][]s linked to the target * resource and its ancestors. Each association's * [google.cloud.privatecatalogproducer.v1beta.Catalog][] is transformed into a * `Catalog`. If multiple associations have the same parent * [google.cloud.privatecatalogproducer.v1beta.Catalog][], they are * de-duplicated into one `Catalog`. Users must have * `cloudprivatecatalog.catalogTargets.get` IAM permission on the resource * context in order to access catalogs. `Catalog` contains the resource name and * a subset of data of the original * [google.cloud.privatecatalogproducer.v1beta.Catalog][]. * `Product` is child resource of the catalog. A `Product` contains the resource * name and a subset of the data of the original * [google.cloud.privatecatalogproducer.v1beta.Product][]. * `Version` is child resource of the product. A `Version` contains the resource * name and a subset of the data of the original * [google.cloud.privatecatalogproducer.v1beta.Version][]. 
* </pre> */ @javax.annotation.Generated( value = "by gRPC proto compiler", comments = "Source: google/cloud/privatecatalog/v1beta1/private_catalog.proto") @io.grpc.stub.annotations.GrpcGenerated public final class PrivateCatalogGrpc { private PrivateCatalogGrpc() {} public static final java.lang.String SERVICE_NAME = "google.cloud.privatecatalog.v1beta1.PrivateCatalog"; // Static method descriptors that strictly reflect the proto. private static volatile io.grpc.MethodDescriptor< com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest, com.google.cloud.privatecatalog.v1beta1.SearchCatalogsResponse> getSearchCatalogsMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "SearchCatalogs", requestType = com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest.class, responseType = com.google.cloud.privatecatalog.v1beta1.SearchCatalogsResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest, com.google.cloud.privatecatalog.v1beta1.SearchCatalogsResponse> getSearchCatalogsMethod() { io.grpc.MethodDescriptor< com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest, com.google.cloud.privatecatalog.v1beta1.SearchCatalogsResponse> getSearchCatalogsMethod; if ((getSearchCatalogsMethod = PrivateCatalogGrpc.getSearchCatalogsMethod) == null) { synchronized (PrivateCatalogGrpc.class) { if ((getSearchCatalogsMethod = PrivateCatalogGrpc.getSearchCatalogsMethod) == null) { PrivateCatalogGrpc.getSearchCatalogsMethod = getSearchCatalogsMethod = io.grpc.MethodDescriptor .<com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest, com.google.cloud.privatecatalog.v1beta1.SearchCatalogsResponse> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "SearchCatalogs")) .setSampledToLocalTracing(true) .setRequestMarshaller( 
io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.privatecatalog.v1beta1.SearchCatalogsResponse .getDefaultInstance())) .setSchemaDescriptor( new PrivateCatalogMethodDescriptorSupplier("SearchCatalogs")) .build(); } } } return getSearchCatalogsMethod; } private static volatile io.grpc.MethodDescriptor< com.google.cloud.privatecatalog.v1beta1.SearchProductsRequest, com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse> getSearchProductsMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "SearchProducts", requestType = com.google.cloud.privatecatalog.v1beta1.SearchProductsRequest.class, responseType = com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.privatecatalog.v1beta1.SearchProductsRequest, com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse> getSearchProductsMethod() { io.grpc.MethodDescriptor< com.google.cloud.privatecatalog.v1beta1.SearchProductsRequest, com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse> getSearchProductsMethod; if ((getSearchProductsMethod = PrivateCatalogGrpc.getSearchProductsMethod) == null) { synchronized (PrivateCatalogGrpc.class) { if ((getSearchProductsMethod = PrivateCatalogGrpc.getSearchProductsMethod) == null) { PrivateCatalogGrpc.getSearchProductsMethod = getSearchProductsMethod = io.grpc.MethodDescriptor .<com.google.cloud.privatecatalog.v1beta1.SearchProductsRequest, com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "SearchProducts")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( 
com.google.cloud.privatecatalog.v1beta1.SearchProductsRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse .getDefaultInstance())) .setSchemaDescriptor( new PrivateCatalogMethodDescriptorSupplier("SearchProducts")) .build(); } } } return getSearchProductsMethod; } private static volatile io.grpc.MethodDescriptor< com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest, com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse> getSearchVersionsMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "SearchVersions", requestType = com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest.class, responseType = com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest, com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse> getSearchVersionsMethod() { io.grpc.MethodDescriptor< com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest, com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse> getSearchVersionsMethod; if ((getSearchVersionsMethod = PrivateCatalogGrpc.getSearchVersionsMethod) == null) { synchronized (PrivateCatalogGrpc.class) { if ((getSearchVersionsMethod = PrivateCatalogGrpc.getSearchVersionsMethod) == null) { PrivateCatalogGrpc.getSearchVersionsMethod = getSearchVersionsMethod = io.grpc.MethodDescriptor .<com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest, com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "SearchVersions")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest 
.getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse .getDefaultInstance())) .setSchemaDescriptor( new PrivateCatalogMethodDescriptorSupplier("SearchVersions")) .build(); } } } return getSearchVersionsMethod; } /** Creates a new async stub that supports all call types for the service */ public static PrivateCatalogStub newStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<PrivateCatalogStub> factory = new io.grpc.stub.AbstractStub.StubFactory<PrivateCatalogStub>() { @java.lang.Override public PrivateCatalogStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new PrivateCatalogStub(channel, callOptions); } }; return PrivateCatalogStub.newStub(factory, channel); } /** Creates a new blocking-style stub that supports all types of calls on the service */ public static PrivateCatalogBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<PrivateCatalogBlockingV2Stub> factory = new io.grpc.stub.AbstractStub.StubFactory<PrivateCatalogBlockingV2Stub>() { @java.lang.Override public PrivateCatalogBlockingV2Stub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new PrivateCatalogBlockingV2Stub(channel, callOptions); } }; return PrivateCatalogBlockingV2Stub.newStub(factory, channel); } /** * Creates a new blocking-style stub that supports unary and streaming output calls on the service */ public static PrivateCatalogBlockingStub newBlockingStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<PrivateCatalogBlockingStub> factory = new io.grpc.stub.AbstractStub.StubFactory<PrivateCatalogBlockingStub>() { @java.lang.Override public PrivateCatalogBlockingStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new PrivateCatalogBlockingStub(channel, callOptions); } }; return PrivateCatalogBlockingStub.newStub(factory, channel); } /** 
Creates a new ListenableFuture-style stub that supports unary calls on the service */ public static PrivateCatalogFutureStub newFutureStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<PrivateCatalogFutureStub> factory = new io.grpc.stub.AbstractStub.StubFactory<PrivateCatalogFutureStub>() { @java.lang.Override public PrivateCatalogFutureStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new PrivateCatalogFutureStub(channel, callOptions); } }; return PrivateCatalogFutureStub.newStub(factory, channel); } /** * * * <pre> * `PrivateCatalog` allows catalog consumers to retrieve `Catalog`, `Product` * and `Version` resources under a target resource context. * `Catalog` is computed based on the [Association][]s linked to the target * resource and its ancestors. Each association's * [google.cloud.privatecatalogproducer.v1beta.Catalog][] is transformed into a * `Catalog`. If multiple associations have the same parent * [google.cloud.privatecatalogproducer.v1beta.Catalog][], they are * de-duplicated into one `Catalog`. Users must have * `cloudprivatecatalog.catalogTargets.get` IAM permission on the resource * context in order to access catalogs. `Catalog` contains the resource name and * a subset of data of the original * [google.cloud.privatecatalogproducer.v1beta.Catalog][]. * `Product` is child resource of the catalog. A `Product` contains the resource * name and a subset of the data of the original * [google.cloud.privatecatalogproducer.v1beta.Product][]. * `Version` is child resource of the product. A `Version` contains the resource * name and a subset of the data of the original * [google.cloud.privatecatalogproducer.v1beta.Version][]. * </pre> */ public interface AsyncService { /** * * * <pre> * Search [Catalog][google.cloud.privatecatalog.v1beta1.Catalog] resources that consumers have access to, within the * scope of the consumer cloud resource hierarchy context. 
* </pre> */ default void searchCatalogs( com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest request, io.grpc.stub.StreamObserver<com.google.cloud.privatecatalog.v1beta1.SearchCatalogsResponse> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getSearchCatalogsMethod(), responseObserver); } /** * * * <pre> * Search [Product][google.cloud.privatecatalog.v1beta1.Product] resources that consumers have access to, within the * scope of the consumer cloud resource hierarchy context. * </pre> */ default void searchProducts( com.google.cloud.privatecatalog.v1beta1.SearchProductsRequest request, io.grpc.stub.StreamObserver<com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getSearchProductsMethod(), responseObserver); } /** * * * <pre> * Search [Version][google.cloud.privatecatalog.v1beta1.Version] resources that consumers have access to, within the * scope of the consumer cloud resource hierarchy context. * </pre> */ default void searchVersions( com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest request, io.grpc.stub.StreamObserver<com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getSearchVersionsMethod(), responseObserver); } } /** * Base class for the server implementation of the service PrivateCatalog. * * <pre> * `PrivateCatalog` allows catalog consumers to retrieve `Catalog`, `Product` * and `Version` resources under a target resource context. * `Catalog` is computed based on the [Association][]s linked to the target * resource and its ancestors. Each association's * [google.cloud.privatecatalogproducer.v1beta.Catalog][] is transformed into a * `Catalog`. If multiple associations have the same parent * [google.cloud.privatecatalogproducer.v1beta.Catalog][], they are * de-duplicated into one `Catalog`. 
Users must have * `cloudprivatecatalog.catalogTargets.get` IAM permission on the resource * context in order to access catalogs. `Catalog` contains the resource name and * a subset of data of the original * [google.cloud.privatecatalogproducer.v1beta.Catalog][]. * `Product` is child resource of the catalog. A `Product` contains the resource * name and a subset of the data of the original * [google.cloud.privatecatalogproducer.v1beta.Product][]. * `Version` is child resource of the product. A `Version` contains the resource * name and a subset of the data of the original * [google.cloud.privatecatalogproducer.v1beta.Version][]. * </pre> */ public abstract static class PrivateCatalogImplBase implements io.grpc.BindableService, AsyncService { @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() { return PrivateCatalogGrpc.bindService(this); } } /** * A stub to allow clients to do asynchronous rpc calls to service PrivateCatalog. * * <pre> * `PrivateCatalog` allows catalog consumers to retrieve `Catalog`, `Product` * and `Version` resources under a target resource context. * `Catalog` is computed based on the [Association][]s linked to the target * resource and its ancestors. Each association's * [google.cloud.privatecatalogproducer.v1beta.Catalog][] is transformed into a * `Catalog`. If multiple associations have the same parent * [google.cloud.privatecatalogproducer.v1beta.Catalog][], they are * de-duplicated into one `Catalog`. Users must have * `cloudprivatecatalog.catalogTargets.get` IAM permission on the resource * context in order to access catalogs. `Catalog` contains the resource name and * a subset of data of the original * [google.cloud.privatecatalogproducer.v1beta.Catalog][]. * `Product` is child resource of the catalog. A `Product` contains the resource * name and a subset of the data of the original * [google.cloud.privatecatalogproducer.v1beta.Product][]. * `Version` is child resource of the product. 
A `Version` contains the resource * name and a subset of the data of the original * [google.cloud.privatecatalogproducer.v1beta.Version][]. * </pre> */ public static final class PrivateCatalogStub extends io.grpc.stub.AbstractAsyncStub<PrivateCatalogStub> { private PrivateCatalogStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected PrivateCatalogStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new PrivateCatalogStub(channel, callOptions); } /** * * * <pre> * Search [Catalog][google.cloud.privatecatalog.v1beta1.Catalog] resources that consumers have access to, within the * scope of the consumer cloud resource hierarchy context. * </pre> */ public void searchCatalogs( com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest request, io.grpc.stub.StreamObserver<com.google.cloud.privatecatalog.v1beta1.SearchCatalogsResponse> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getSearchCatalogsMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Search [Product][google.cloud.privatecatalog.v1beta1.Product] resources that consumers have access to, within the * scope of the consumer cloud resource hierarchy context. * </pre> */ public void searchProducts( com.google.cloud.privatecatalog.v1beta1.SearchProductsRequest request, io.grpc.stub.StreamObserver<com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getSearchProductsMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Search [Version][google.cloud.privatecatalog.v1beta1.Version] resources that consumers have access to, within the * scope of the consumer cloud resource hierarchy context. 
* </pre> */ public void searchVersions( com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest request, io.grpc.stub.StreamObserver<com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getSearchVersionsMethod(), getCallOptions()), request, responseObserver); } } /** * A stub to allow clients to do synchronous rpc calls to service PrivateCatalog. * * <pre> * `PrivateCatalog` allows catalog consumers to retrieve `Catalog`, `Product` * and `Version` resources under a target resource context. * `Catalog` is computed based on the [Association][]s linked to the target * resource and its ancestors. Each association's * [google.cloud.privatecatalogproducer.v1beta.Catalog][] is transformed into a * `Catalog`. If multiple associations have the same parent * [google.cloud.privatecatalogproducer.v1beta.Catalog][], they are * de-duplicated into one `Catalog`. Users must have * `cloudprivatecatalog.catalogTargets.get` IAM permission on the resource * context in order to access catalogs. `Catalog` contains the resource name and * a subset of data of the original * [google.cloud.privatecatalogproducer.v1beta.Catalog][]. * `Product` is child resource of the catalog. A `Product` contains the resource * name and a subset of the data of the original * [google.cloud.privatecatalogproducer.v1beta.Product][]. * `Version` is child resource of the product. A `Version` contains the resource * name and a subset of the data of the original * [google.cloud.privatecatalogproducer.v1beta.Version][]. 
* </pre> */ public static final class PrivateCatalogBlockingV2Stub extends io.grpc.stub.AbstractBlockingStub<PrivateCatalogBlockingV2Stub> { private PrivateCatalogBlockingV2Stub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected PrivateCatalogBlockingV2Stub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new PrivateCatalogBlockingV2Stub(channel, callOptions); } /** * * * <pre> * Search [Catalog][google.cloud.privatecatalog.v1beta1.Catalog] resources that consumers have access to, within the * scope of the consumer cloud resource hierarchy context. * </pre> */ public com.google.cloud.privatecatalog.v1beta1.SearchCatalogsResponse searchCatalogs( com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getSearchCatalogsMethod(), getCallOptions(), request); } /** * * * <pre> * Search [Product][google.cloud.privatecatalog.v1beta1.Product] resources that consumers have access to, within the * scope of the consumer cloud resource hierarchy context. * </pre> */ public com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse searchProducts( com.google.cloud.privatecatalog.v1beta1.SearchProductsRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getSearchProductsMethod(), getCallOptions(), request); } /** * * * <pre> * Search [Version][google.cloud.privatecatalog.v1beta1.Version] resources that consumers have access to, within the * scope of the consumer cloud resource hierarchy context. 
* </pre> */ public com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse searchVersions( com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getSearchVersionsMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do limited synchronous rpc calls to service PrivateCatalog. * * <pre> * `PrivateCatalog` allows catalog consumers to retrieve `Catalog`, `Product` * and `Version` resources under a target resource context. * `Catalog` is computed based on the [Association][]s linked to the target * resource and its ancestors. Each association's * [google.cloud.privatecatalogproducer.v1beta.Catalog][] is transformed into a * `Catalog`. If multiple associations have the same parent * [google.cloud.privatecatalogproducer.v1beta.Catalog][], they are * de-duplicated into one `Catalog`. Users must have * `cloudprivatecatalog.catalogTargets.get` IAM permission on the resource * context in order to access catalogs. `Catalog` contains the resource name and * a subset of data of the original * [google.cloud.privatecatalogproducer.v1beta.Catalog][]. * `Product` is child resource of the catalog. A `Product` contains the resource * name and a subset of the data of the original * [google.cloud.privatecatalogproducer.v1beta.Product][]. * `Version` is child resource of the product. A `Version` contains the resource * name and a subset of the data of the original * [google.cloud.privatecatalogproducer.v1beta.Version][]. 
* </pre> */ public static final class PrivateCatalogBlockingStub extends io.grpc.stub.AbstractBlockingStub<PrivateCatalogBlockingStub> { private PrivateCatalogBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected PrivateCatalogBlockingStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new PrivateCatalogBlockingStub(channel, callOptions); } /** * * * <pre> * Search [Catalog][google.cloud.privatecatalog.v1beta1.Catalog] resources that consumers have access to, within the * scope of the consumer cloud resource hierarchy context. * </pre> */ public com.google.cloud.privatecatalog.v1beta1.SearchCatalogsResponse searchCatalogs( com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getSearchCatalogsMethod(), getCallOptions(), request); } /** * * * <pre> * Search [Product][google.cloud.privatecatalog.v1beta1.Product] resources that consumers have access to, within the * scope of the consumer cloud resource hierarchy context. * </pre> */ public com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse searchProducts( com.google.cloud.privatecatalog.v1beta1.SearchProductsRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getSearchProductsMethod(), getCallOptions(), request); } /** * * * <pre> * Search [Version][google.cloud.privatecatalog.v1beta1.Version] resources that consumers have access to, within the * scope of the consumer cloud resource hierarchy context. * </pre> */ public com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse searchVersions( com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getSearchVersionsMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do ListenableFuture-style rpc calls to service PrivateCatalog. 
* * <pre> * `PrivateCatalog` allows catalog consumers to retrieve `Catalog`, `Product` * and `Version` resources under a target resource context. * `Catalog` is computed based on the [Association][]s linked to the target * resource and its ancestors. Each association's * [google.cloud.privatecatalogproducer.v1beta.Catalog][] is transformed into a * `Catalog`. If multiple associations have the same parent * [google.cloud.privatecatalogproducer.v1beta.Catalog][], they are * de-duplicated into one `Catalog`. Users must have * `cloudprivatecatalog.catalogTargets.get` IAM permission on the resource * context in order to access catalogs. `Catalog` contains the resource name and * a subset of data of the original * [google.cloud.privatecatalogproducer.v1beta.Catalog][]. * `Product` is child resource of the catalog. A `Product` contains the resource * name and a subset of the data of the original * [google.cloud.privatecatalogproducer.v1beta.Product][]. * `Version` is child resource of the product. A `Version` contains the resource * name and a subset of the data of the original * [google.cloud.privatecatalogproducer.v1beta.Version][]. * </pre> */ public static final class PrivateCatalogFutureStub extends io.grpc.stub.AbstractFutureStub<PrivateCatalogFutureStub> { private PrivateCatalogFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected PrivateCatalogFutureStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new PrivateCatalogFutureStub(channel, callOptions); } /** * * * <pre> * Search [Catalog][google.cloud.privatecatalog.v1beta1.Catalog] resources that consumers have access to, within the * scope of the consumer cloud resource hierarchy context. 
* </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.cloud.privatecatalog.v1beta1.SearchCatalogsResponse> searchCatalogs(com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getSearchCatalogsMethod(), getCallOptions()), request); } /** * * * <pre> * Search [Product][google.cloud.privatecatalog.v1beta1.Product] resources that consumers have access to, within the * scope of the consumer cloud resource hierarchy context. * </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse> searchProducts(com.google.cloud.privatecatalog.v1beta1.SearchProductsRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getSearchProductsMethod(), getCallOptions()), request); } /** * * * <pre> * Search [Version][google.cloud.privatecatalog.v1beta1.Version] resources that consumers have access to, within the * scope of the consumer cloud resource hierarchy context. 
* </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse> searchVersions(com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getSearchVersionsMethod(), getCallOptions()), request); } } private static final int METHODID_SEARCH_CATALOGS = 0; private static final int METHODID_SEARCH_PRODUCTS = 1; private static final int METHODID_SEARCH_VERSIONS = 2; private static final class MethodHandlers<Req, Resp> implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>, io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> { private final AsyncService serviceImpl; private final int methodId; MethodHandlers(AsyncService serviceImpl, int methodId) { this.serviceImpl = serviceImpl; this.methodId = methodId; } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { case METHODID_SEARCH_CATALOGS: serviceImpl.searchCatalogs( (com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest) request, (io.grpc.stub.StreamObserver< com.google.cloud.privatecatalog.v1beta1.SearchCatalogsResponse>) responseObserver); break; case METHODID_SEARCH_PRODUCTS: serviceImpl.searchProducts( (com.google.cloud.privatecatalog.v1beta1.SearchProductsRequest) request, (io.grpc.stub.StreamObserver< com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse>) responseObserver); break; case METHODID_SEARCH_VERSIONS: serviceImpl.searchVersions( (com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest) request, (io.grpc.stub.StreamObserver< com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse>) responseObserver); break; default: throw new AssertionError(); } } @java.lang.Override 
@java.lang.SuppressWarnings("unchecked") public io.grpc.stub.StreamObserver<Req> invoke( io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { default: throw new AssertionError(); } } } public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) { return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor()) .addMethod( getSearchCatalogsMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest, com.google.cloud.privatecatalog.v1beta1.SearchCatalogsResponse>( service, METHODID_SEARCH_CATALOGS))) .addMethod( getSearchProductsMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.privatecatalog.v1beta1.SearchProductsRequest, com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse>( service, METHODID_SEARCH_PRODUCTS))) .addMethod( getSearchVersionsMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest, com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse>( service, METHODID_SEARCH_VERSIONS))) .build(); } private abstract static class PrivateCatalogBaseDescriptorSupplier implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier { PrivateCatalogBaseDescriptorSupplier() {} @java.lang.Override public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() { return com.google.cloud.privatecatalog.v1beta1.PrivateCatalogProto.getDescriptor(); } @java.lang.Override public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() { return getFileDescriptor().findServiceByName("PrivateCatalog"); } } private static final class PrivateCatalogFileDescriptorSupplier extends PrivateCatalogBaseDescriptorSupplier { PrivateCatalogFileDescriptorSupplier() {} } private static final class PrivateCatalogMethodDescriptorSupplier extends 
PrivateCatalogBaseDescriptorSupplier implements io.grpc.protobuf.ProtoMethodDescriptorSupplier { private final java.lang.String methodName; PrivateCatalogMethodDescriptorSupplier(java.lang.String methodName) { this.methodName = methodName; } @java.lang.Override public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() { return getServiceDescriptor().findMethodByName(methodName); } } private static volatile io.grpc.ServiceDescriptor serviceDescriptor; public static io.grpc.ServiceDescriptor getServiceDescriptor() { io.grpc.ServiceDescriptor result = serviceDescriptor; if (result == null) { synchronized (PrivateCatalogGrpc.class) { result = serviceDescriptor; if (result == null) { serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME) .setSchemaDescriptor(new PrivateCatalogFileDescriptorSupplier()) .addMethod(getSearchCatalogsMethod()) .addMethod(getSearchProductsMethod()) .addMethod(getSearchVersionsMethod()) .build(); } } } return result; } }
googleapis/google-cloud-java
37,130
java-backupdr/proto-google-cloud-backupdr-v1/src/main/java/com/google/cloud/backupdr/v1/ListBackupPlanAssociationsRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/backupdr/v1/backupplanassociation.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.backupdr.v1; /** * * * <pre> * Request message for List BackupPlanAssociation * </pre> * * Protobuf type {@code google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest} */ public final class ListBackupPlanAssociationsRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest) ListBackupPlanAssociationsRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListBackupPlanAssociationsRequest.newBuilder() to construct. 
private ListBackupPlanAssociationsRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListBackupPlanAssociationsRequest() { parent_ = ""; pageToken_ = ""; filter_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListBackupPlanAssociationsRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.backupdr.v1.BackupPlanAssociationProto .internal_static_google_cloud_backupdr_v1_ListBackupPlanAssociationsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.backupdr.v1.BackupPlanAssociationProto .internal_static_google_cloud_backupdr_v1_ListBackupPlanAssociationsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest.class, com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The project and location for which to retrieve backup Plan * Associations information, in the format * `projects/{project_id}/locations/{location}`. In Cloud BackupDR, locations * map to GCP regions, for example **us-central1**. To retrieve backup plan * associations for all locations, use "-" for the * `{location}` value. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. 
*/ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The project and location for which to retrieve backup Plan * Associations information, in the format * `projects/{project_id}/locations/{location}`. In Cloud BackupDR, locations * map to GCP regions, for example **us-central1**. To retrieve backup plan * associations for all locations, use "-" for the * `{location}` value. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 2; private int pageSize_ = 0; /** * * * <pre> * Optional. Requested page size. Server may return fewer items than * requested. If unspecified, server will pick an appropriate default. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. 
*/ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * Optional. A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. */ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FILTER_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object filter_ = ""; /** * * * <pre> * Optional. Filtering results * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The filter. */ @java.lang.Override public java.lang.String getFilter() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } } /** * * * <pre> * Optional. Filtering results * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for filter. 
*/ @java.lang.Override public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (pageSize_ != 0) { output.writeInt32(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return 
size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest)) { return super.equals(obj); } com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest other = (com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest) obj; if (!getParent().equals(other.getParent())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (!getFilter().equals(other.getFilter())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (37 * hash) + FILTER_FIELD_NUMBER; hash = (53 * hash) + getFilter().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data); } public static com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest parseFrom( 
com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for List BackupPlanAssociation * </pre> * * Protobuf type {@code google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest) com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.backupdr.v1.BackupPlanAssociationProto .internal_static_google_cloud_backupdr_v1_ListBackupPlanAssociationsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.backupdr.v1.BackupPlanAssociationProto 
.internal_static_google_cloud_backupdr_v1_ListBackupPlanAssociationsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest.class, com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest.Builder.class); } // Construct using com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; pageSize_ = 0; pageToken_ = ""; filter_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.backupdr.v1.BackupPlanAssociationProto .internal_static_google_cloud_backupdr_v1_ListBackupPlanAssociationsRequest_descriptor; } @java.lang.Override public com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest getDefaultInstanceForType() { return com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest build() { com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest buildPartial() { com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest result = new com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.pageSize_ = 
pageSize_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageToken_ = pageToken_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.filter_ = filter_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest) { return mergeFrom((com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest other) { if (other == com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000004; onChanged(); } if (!other.getFilter().isEmpty()) { filter_ = other.filter_; bitField0_ |= 0x00000008; onChanged(); } 
this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { pageSize_ = input.readInt32(); bitField0_ |= 0x00000002; break; } // case 16 case 26: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 case 34: { filter_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The project and location for which to retrieve backup Plan * Associations information, in the format * `projects/{project_id}/locations/{location}`. In Cloud BackupDR, locations * map to GCP regions, for example **us-central1**. To retrieve backup plan * associations for all locations, use "-" for the * `{location}` value. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. 
*/ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The project and location for which to retrieve backup Plan * Associations information, in the format * `projects/{project_id}/locations/{location}`. In Cloud BackupDR, locations * map to GCP regions, for example **us-central1**. To retrieve backup plan * associations for all locations, use "-" for the * `{location}` value. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The project and location for which to retrieve backup Plan * Associations information, in the format * `projects/{project_id}/locations/{location}`. In Cloud BackupDR, locations * map to GCP regions, for example **us-central1**. To retrieve backup plan * associations for all locations, use "-" for the * `{location}` value. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. 
The project and location for which to retrieve backup Plan * Associations information, in the format * `projects/{project_id}/locations/{location}`. In Cloud BackupDR, locations * map to GCP regions, for example **us-central1**. To retrieve backup plan * associations for all locations, use "-" for the * `{location}` value. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The project and location for which to retrieve backup Plan * Associations information, in the format * `projects/{project_id}/locations/{location}`. In Cloud BackupDR, locations * map to GCP regions, for example **us-central1**. To retrieve backup plan * associations for all locations, use "-" for the * `{location}` value. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private int pageSize_; /** * * * <pre> * Optional. Requested page size. Server may return fewer items than * requested. If unspecified, server will pick an appropriate default. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * Optional. Requested page size. Server may return fewer items than * requested. If unspecified, server will pick an appropriate default. 
* </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. Requested page size. Server may return fewer items than * requested. If unspecified, server will pick an appropriate default. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000002); pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageToken to set. 
* @return This builder for chaining. */ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Optional. A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. */ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private java.lang.Object filter_ = ""; /** * * * <pre> * Optional. Filtering results * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The filter. */ public java.lang.String getFilter() { java.lang.Object ref = filter_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. Filtering results * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for filter. 
*/ public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. Filtering results * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The filter to set. * @return This builder for chaining. */ public Builder setFilter(java.lang.String value) { if (value == null) { throw new NullPointerException(); } filter_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Optional. Filtering results * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearFilter() { filter_ = getDefaultInstance().getFilter(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * Optional. Filtering results * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for filter to set. * @return This builder for chaining. 
*/ public Builder setFilterBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); filter_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest) } // @@protoc_insertion_point(class_scope:google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest) private static final com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest(); } public static com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListBackupPlanAssociationsRequest> PARSER = new com.google.protobuf.AbstractParser<ListBackupPlanAssociationsRequest>() { @java.lang.Override public ListBackupPlanAssociationsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) 
.setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListBackupPlanAssociationsRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListBackupPlanAssociationsRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.backupdr.v1.ListBackupPlanAssociationsRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/incubator-kie-drools
37,534
kie-pmml-trusty/kie-pmml-models/kie-pmml-models-regression/kie-pmml-models-regression-compiler/src/main/java/org/kie/pmml/models/regression/compiler/factories/KiePMMLRegressionTableFactory.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.kie.pmml.models.regression.compiler.factories; import java.util.AbstractMap; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.UUID; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; import com.github.javaparser.ast.CompilationUnit; import com.github.javaparser.ast.NodeList; import com.github.javaparser.ast.body.ClassOrInterfaceDeclaration; import com.github.javaparser.ast.body.MethodDeclaration; import com.github.javaparser.ast.body.Parameter; import com.github.javaparser.ast.body.VariableDeclarator; import com.github.javaparser.ast.expr.CastExpr; import com.github.javaparser.ast.expr.Expression; import com.github.javaparser.ast.expr.LambdaExpr; import com.github.javaparser.ast.expr.MethodCallExpr; import com.github.javaparser.ast.expr.MethodReferenceExpr; import com.github.javaparser.ast.expr.NameExpr; import com.github.javaparser.ast.expr.NullLiteralExpr; import com.github.javaparser.ast.expr.ObjectCreationExpr; import 
com.github.javaparser.ast.expr.StringLiteralExpr; import com.github.javaparser.ast.expr.VariableDeclarationExpr; import com.github.javaparser.ast.stmt.BlockStmt; import com.github.javaparser.ast.stmt.ExpressionStmt; import com.github.javaparser.ast.type.ClassOrInterfaceType; import com.github.javaparser.ast.type.UnknownType; import org.dmg.pmml.regression.CategoricalPredictor; import org.dmg.pmml.regression.NumericPredictor; import org.dmg.pmml.regression.PredictorTerm; import org.dmg.pmml.regression.RegressionModel; import org.dmg.pmml.regression.RegressionTable; import org.kie.pmml.api.exceptions.KiePMMLException; import org.kie.pmml.api.exceptions.KiePMMLInternalException; import org.kie.pmml.api.iinterfaces.SerializableFunction; import org.kie.pmml.compiler.commons.utils.JavaParserUtils; import org.kie.pmml.models.regression.compiler.dto.RegressionCompilationDTO; import org.kie.pmml.models.regression.model.AbstractKiePMMLTable; import org.kie.pmml.models.regression.model.KiePMMLRegressionTable; import org.kie.pmml.models.regression.model.tuples.KiePMMLTableSourceCategory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static java.util.stream.Collectors.groupingBy; import static org.kie.pmml.commons.Constants.MISSING_BODY_TEMPLATE; import static org.kie.pmml.commons.Constants.MISSING_VARIABLE_INITIALIZER_TEMPLATE; import static org.kie.pmml.commons.Constants.MISSING_VARIABLE_IN_BODY; import static org.kie.pmml.commons.Constants.TO_RETURN; import static org.kie.pmml.commons.Constants.VARIABLE_NAME_TEMPLATE; import static org.kie.pmml.commons.utils.KiePMMLModelUtils.getSanitizedVariableName; import static org.kie.pmml.compiler.commons.utils.CommonCodegenUtils.addMapPopulationExpressions; import static org.kie.pmml.compiler.commons.utils.CommonCodegenUtils.createPopulatedHashMap; import static org.kie.pmml.compiler.commons.utils.CommonCodegenUtils.getChainedMethodCallExprFrom; import static 
org.kie.pmml.compiler.commons.utils.CommonCodegenUtils.getExpressionForObject;
import static org.kie.pmml.compiler.commons.utils.CommonCodegenUtils.getTypedClassOrInterfaceTypeByTypeNames;
import static org.kie.pmml.compiler.commons.utils.CommonCodegenUtils.getVariableDeclarator;
import static org.kie.pmml.compiler.commons.utils.JavaParserUtils.MAIN_CLASS_NOT_FOUND;
import static org.kie.pmml.compiler.commons.utils.JavaParserUtils.getFromFileName;
import static org.kie.pmml.compiler.commons.utils.JavaParserUtils.getFullClassName;

/**
 * Factory that translates PMML {@code RegressionTable} definitions either into runtime
 * {@link KiePMMLRegressionTable} instances (interpreted evaluation) or into generated
 * Java source code (compiled evaluation) based on a JavaParser template.
 */
public class KiePMMLRegressionTableFactory {

    // Template file / class names used as the code-generation skeleton.
    static final String KIE_PMML_REGRESSION_TABLE_TEMPLATE_JAVA = "KiePMMLRegressionTableTemplate.tmpl";
    static final String KIE_PMML_REGRESSION_TABLE_TEMPLATE = "KiePMMLRegressionTableTemplate";
    // Name of the static getter inside the template whose body gets rewritten per table.
    static final String GETKIEPMML_TABLE = "getKiePMMLTable";
    // Base names of the three predictor-function maps declared in the generated source.
    static final String NUMERIC_FUNCTION_MAP = "numericFunctionMap";
    static final String CATEGORICAL_FUNCTION_MAP = "categoricalFunctionMap";
    static final String PREDICTOR_TERM_FUNCTION_MAP = "predictorTermFunctionMap";
    static final ClassOrInterfaceDeclaration REGRESSION_TABLE_TEMPLATE;

    static {
        // Parse the template once at class-load time; fail fast if the template class is missing.
        CompilationUnit cloneCU = JavaParserUtils.getFromFileName(KIE_PMML_REGRESSION_TABLE_TEMPLATE_JAVA);
        REGRESSION_TABLE_TEMPLATE = cloneCU.getClassByName(KIE_PMML_REGRESSION_TABLE_TEMPLATE)
                .orElseThrow(() -> new KiePMMLException(MAIN_CLASS_NOT_FOUND + ": " + KIE_PMML_REGRESSION_TABLE_TEMPLATE));
    }

    static final Logger logger = LoggerFactory.getLogger(KiePMMLRegressionTableFactory.class.getName());
    static final String KIE_PMML_EVALUATE_METHOD_TEMPLATE_JAVA = "KiePMMLEvaluateMethodTemplate.tmpl";
    static final String KIE_PMML_EVALUATE_METHOD_TEMPLATE = "KiePMMLEvaluateMethodTemplate";
    // Normalization methods this factory can translate; unsupported ones yield a null result updater.
    static final List<RegressionModel.NormalizationMethod> SUPPORTED_NORMALIZATION_METHODS =
            Arrays.asList(RegressionModel.NormalizationMethod.SOFTMAX,
                          RegressionModel.NormalizationMethod.LOGIT,
                          RegressionModel.NormalizationMethod.EXP,
                          RegressionModel.NormalizationMethod.PROBIT,
                          RegressionModel.NormalizationMethod.CLOGLOG,
                          RegressionModel.NormalizationMethod.CAUCHIT,
                          RegressionModel.NormalizationMethod.NONE);
    static final List<RegressionModel.NormalizationMethod> UNSUPPORTED_NORMALIZATION_METHODS =
            Arrays.asList(
                    RegressionModel.NormalizationMethod.SIMPLEMAX,
                    RegressionModel.NormalizationMethod.LOGLOG);
    private static final String COEFFICIENT = "coefficient";
    // Monotonic counter so each generated table class gets a unique name.
    // NOTE(review): shared static mutable state — assumes single-threaded compilation; confirm.
    private static AtomicInteger classArity = new AtomicInteger(0);
    // Counter used to name anonymous predictor-term functions; reset per getPredictorTermsMap call.
    private static AtomicInteger predictorsArity = new AtomicInteger(0);
    private static CompilationUnit templateEvaluate;
    private static CompilationUnit cloneEvaluate;

    private KiePMMLRegressionTableFactory() {
        // Avoid instantiation
    }

    // KiePMMLRegressionTable instantiation

    /**
     * Builds one runtime {@link KiePMMLRegressionTable} per PMML {@code RegressionTable},
     * keyed by target category ("" when the table has none), preserving iteration order.
     *
     * @param compilationDTO DTO carrying the regression tables and model-level settings
     * @return ordered map of target category to runtime table
     */
    public static LinkedHashMap<String, KiePMMLRegressionTable> getRegressionTables(final RegressionCompilationDTO compilationDTO) {
        logger.trace("getRegressionTables {}", compilationDTO.getRegressionTables());
        LinkedHashMap<String, KiePMMLRegressionTable> toReturn = new LinkedHashMap<>();
        for (RegressionTable regressionTable : compilationDTO.getRegressionTables()) {
            final KiePMMLRegressionTable kiePMMLRegressionTable = getRegressionTable(regressionTable, compilationDTO);
            String targetCategory = regressionTable.getTargetCategory() != null ?
                    regressionTable.getTargetCategory().toString() : "";
            toReturn.put(targetCategory, kiePMMLRegressionTable);
        }
        return toReturn;
    }

    /**
     * Builds a single runtime table: assembles the numeric / categorical / predictor-term
     * evaluation functions plus the normalization ("result updater") function.
     *
     * @param regressionTable the PMML table to translate
     * @param compilationDTO  model-level settings (target field, normalization method)
     * @return the runtime table
     */
    public static KiePMMLRegressionTable getRegressionTable(final RegressionTable regressionTable,
                                                            final RegressionCompilationDTO compilationDTO) {
        logger.trace("getRegressionTable {}", regressionTable);
        final Map<String, SerializableFunction<Double, Double>> numericPredictorsMap =
                getNumericPredictorsMap(regressionTable.getNumericPredictors());
        final Map<String, SerializableFunction<String, Double>> categoricalPredictorsMap =
                getCategoricalPredictorsMap(regressionTable.getCategoricalPredictors());
        final Map<String, SerializableFunction<Map<String, Object>, Double>> predictorTermFunctionMap =
                getPredictorTermsMap(regressionTable.getPredictorTerms());
        final SerializableFunction<Double, Double> resultUpdater =
                getResultUpdaterFunction(compilationDTO.getDefaultNormalizationMethod());
        // Intercept is optional in PMML; keep null (rather than 0.0) when absent.
        final Double intercept = regressionTable.getIntercept() != null ? regressionTable.getIntercept().doubleValue() : null;
        return KiePMMLRegressionTable.builder(UUID.randomUUID().toString(), Collections.emptyList())
                .withNumericFunctionMap(numericPredictorsMap)
                .withCategoricalFunctionMap(categoricalPredictorsMap)
                .withPredictorTermsFunctionMap(predictorTermFunctionMap)
                .withResultUpdater(resultUpdater)
                .withIntercept(intercept)
                .withTargetField(compilationDTO.getTargetFieldName())
                .withTargetCategory(regressionTable.getTargetCategory())
                .build();
    }

    // Source code generation

    /**
     * Generates Java source for every PMML {@code RegressionTable}. The returned map is
     * keyed by generated fully-qualified class name, each value pairing the source text
     * with its target category ("" when absent).
     *
     * @param compilationDTO DTO carrying the regression tables and model-level settings
     * @return ordered map of generated class name to (source, target category)
     */
    public static LinkedHashMap<String, KiePMMLTableSourceCategory> getRegressionTableBuilders(final RegressionCompilationDTO compilationDTO) {
        logger.trace("getRegressionTables {}", compilationDTO.getRegressionTables());
        LinkedHashMap<String, KiePMMLTableSourceCategory> toReturn = new LinkedHashMap<>();
        for (RegressionTable regressionTable : compilationDTO.getRegressionTables()) {
            final Map.Entry<String, String> regressionTableEntry = getRegressionTableBuilder(regressionTable, compilationDTO);
            String
targetCategory = regressionTable.getTargetCategory() != null ?
                    regressionTable.getTargetCategory().toString() : "";
            toReturn.put(regressionTableEntry.getKey(),
                         new KiePMMLTableSourceCategory(regressionTableEntry.getValue(), targetCategory));
        }
        return toReturn;
    }

    /**
     * Generates the Java source for one PMML {@code RegressionTable} by cloning the
     * template class under a fresh unique name and rewriting the body of its static getter.
     *
     * @param regressionTable the PMML table to translate
     * @param compilationDTO  model-level settings
     * @return entry of (generated fully-qualified class name, generated source text)
     */
    public static Map.Entry<String, String> getRegressionTableBuilder(final RegressionTable regressionTable,
                                                                      final RegressionCompilationDTO compilationDTO) {
        logger.trace("getRegressionTableBuilder {}", regressionTable);
        // classArity guarantees a unique class name per generated table.
        String className = "KiePMMLRegressionTable" + classArity.addAndGet(1);
        CompilationUnit cloneCU = JavaParserUtils.getKiePMMLModelCompilationUnit(className,
                                                                                compilationDTO.getPackageName(),
                                                                                KIE_PMML_REGRESSION_TABLE_TEMPLATE_JAVA,
                                                                                KIE_PMML_REGRESSION_TABLE_TEMPLATE);
        ClassOrInterfaceDeclaration tableTemplate = cloneCU.getClassByName(className)
                .orElseThrow(() -> new KiePMMLException(MAIN_CLASS_NOT_FOUND + ": " + className));
        final MethodDeclaration staticGetterMethod = tableTemplate.getMethodsByName(GETKIEPMML_TABLE).get(0);
        setStaticGetter(regressionTable, compilationDTO, staticGetterMethod, className.toLowerCase());
        return new AbstractMap.SimpleEntry<>(getFullClassName(cloneCU), cloneCU.toString());
    }

    // not-public KiePMMLRegressionTable instantiation

    /**
     * Creates the <b>NumericPredictor</b>s <code>Map</code>, keyed by predictor field name.
     *
     * @param numericPredictors the PMML numeric predictors
     * @return map of field name to evaluation function
     */
    static Map<String, SerializableFunction<Double, Double>> getNumericPredictorsMap(final List<NumericPredictor> numericPredictors) {
        return numericPredictors.stream()
                .collect(Collectors.toMap(numericPredictor -> numericPredictor.getField(),
                                          KiePMMLRegressionTableFactory::getNumericPredictorEntry));
    }

    /**
     * Creates a <b>NumericPredictor</b> evaluation function (coefficient * input ^ exponent,
     * with the exponent path skipped when the exponent equals one).
     *
     * @param numericPredictor the PMML numeric predictor
     * @return the evaluation function
     */
    static SerializableFunction<Double, Double> getNumericPredictorEntry(final NumericPredictor numericPredictor) {
        // NOTE(review): Objects.equals(1, exponent) boxes 1 as Integer — if getExponent() returns
        // a non-Integer Number equal to one, the exponent branch is still taken; the result is
        // numerically identical, just slower. TODO confirm getExponent()'s declared type.
        boolean withExponent = !Objects.equals(1, numericPredictor.getExponent());
        if (withExponent) {
            return input -> KiePMMLRegressionTable.evaluateNumericWithExponent(input,
                                                                               numericPredictor.getCoefficient().doubleValue(),
                                                                               numericPredictor.getExponent().doubleValue());
        } else {
            return input -> KiePMMLRegressionTable.evaluateNumericWithoutExponent(input,
                                                                                  numericPredictor.getCoefficient().doubleValue());
        }
    }

    /**
     * Creates the <b>CategoricalPredictor</b>s <code>Map</code>: predictors are grouped by
     * field name, and each group becomes one lookup function over its value/coefficient map.
     *
     * @param categoricalPredictors the PMML categorical predictors
     * @return map of field name to evaluation function
     */
    static Map<String, SerializableFunction<String, Double>> getCategoricalPredictorsMap(final List<CategoricalPredictor> categoricalPredictors) {
        final Map<String, List<CategoricalPredictor>> groupedCollectors = categoricalPredictors.stream()
                .collect(groupingBy(categoricalPredictor -> categoricalPredictor.getField()));
        return groupedCollectors.entrySet().stream()
                .map(entry -> {
                    Map<String, Double> groupedCategoricalPredictorMap = getGroupedCategoricalPredictorMap(entry.getValue());
                    SerializableFunction<String, Double> function =
                            input -> KiePMMLRegressionTable.evaluateCategoricalPredictor(input, groupedCategoricalPredictorMap);
                    return new AbstractMap.SimpleEntry<>(entry.getKey(), function);
                })
                .collect(Collectors.toMap(AbstractMap.SimpleEntry::getKey,
                                          AbstractMap.SimpleEntry::getValue));
    }

    /**
     * Populates the value-to-coefficient <code>Map</code> for the given
     * <b>categoricalPredictors</b>, preserving declaration order.
     *
     * @param categoricalPredictors predictors sharing the same field name
     * @return ordered map of category value to coefficient
     */
    static Map<String, Double> getGroupedCategoricalPredictorMap(final List<CategoricalPredictor> categoricalPredictors) {
        final Map<String, Double> toReturn = new LinkedHashMap<>();
        for (CategoricalPredictor categoricalPredictor : categoricalPredictors) {
            toReturn.put(categoricalPredictor.getValue().toString(),
                         categoricalPredictor.getCoefficient().doubleValue());
        }
        return toReturn;
    }

    /**
     * Creates the <b>PredictorTerm</b>s <code>Map</code>; unnamed terms get a synthetic
     * "predictorTermFunction&lt;n&gt;" key.
     *
     * @param predictorTerms the PMML predictor terms
     * @return map of term name to evaluation function
     */
    static Map<String, SerializableFunction<Map<String, Object>, Double>> getPredictorTermsMap(final List<PredictorTerm> predictorTerms) {
        // Shared static counter reset here — concurrent invocations could interleave names.
        // NOTE(review): assumes single-threaded compilation; confirm.
        predictorsArity.set(0);
        return predictorTerms.stream()
                .map(predictorTerm -> {
                    int arity = predictorsArity.addAndGet(1);
                    String variableName = predictorTerm.getName() != null ? predictorTerm.getName() :
                            "predictorTermFunction" + arity;
                    return new AbstractMap.SimpleEntry<>(variableName,
                                                         getPredictorTermSerializableFunction(predictorTerm));
                })
                .collect(Collectors.toMap(AbstractMap.SimpleEntry::getKey,
                                          AbstractMap.SimpleEntry::getValue));
    }

    /**
     * Gets the <b>PredictorTerm</b> <code>SerializableFunction</code>: multiplies together the
     * already-computed values (from the result map) of the fields the term references, then
     * scales by the term coefficient.
     *
     * @param predictorTerm the PMML predictor term
     * @return the evaluation function
     */
    static SerializableFunction<Map<String, Object>, Double> getPredictorTermSerializableFunction(final PredictorTerm predictorTerm) {
        return resultMap -> {
            final AtomicReference<Double> result = new AtomicReference<>(1.0);
            final List<String> fieldRefs = predictorTerm.getFieldRefs().stream()
                    .map(fieldRef -> fieldRef.getField())
                    .collect(Collectors.toList());
            for (Map.Entry<String, Object> entry : resultMap.entrySet()) {
                if (fieldRefs.contains(entry.getKey())) {
                    // NOTE(review): assumes referenced entries are Doubles — ClassCastException otherwise.
                    result.set(result.get() * (Double) entry.getValue());
                }
            }
            return result.get() * predictorTerm.getCoefficient().doubleValue();
        };
    }

    /**
     * Returns the normalization function for the given method, or null when the method is
     * listed as unsupported.
     */
    static SerializableFunction<Double, Double> getResultUpdaterFunction(final RegressionModel.NormalizationMethod normalizationMethod) {
        if (UNSUPPORTED_NORMALIZATION_METHODS.contains(normalizationMethod)) {
            return null;
        } else {
            return getResultUpdaterSupportedFunction(normalizationMethod);
        }
    }

    /**
     * Maps a supported normalization method to the matching <b>resultUpdater</b> function.
     *
     * @param normalizationMethod a member of SUPPORTED_NORMALIZATION_METHODS
     * @return the normalization function
     */
    static SerializableFunction<Double, Double> getResultUpdaterSupportedFunction(final RegressionModel.NormalizationMethod normalizationMethod) {
        switch (normalizationMethod) {
            case SOFTMAX:
                return AbstractKiePMMLTable::updateSOFTMAXResult;
            case LOGIT:
                return AbstractKiePMMLTable::updateLOGITResult;
            case EXP:
                return AbstractKiePMMLTable::updateEXPResult;
            case PROBIT:
                return AbstractKiePMMLTable::updatePROBITResult;
            case CLOGLOG:
                return
AbstractKiePMMLTable::updateCLOGLOGResult;
            case CAUCHIT:
                return AbstractKiePMMLTable::updateCAUCHITResult;
            case NONE:
                return AbstractKiePMMLTable::updateNONEResult;
            default:
                throw new KiePMMLException("Unexpected NormalizationMethod " + normalizationMethod);
        }
    }

    // not-public code-generation

    /**
     * Rewrites the template's static getter so that it builds this specific table: first
     * declares and populates the three predictor-function maps, then patches the chained
     * builder call found in the template (name, maps, intercept, target field/category,
     * result updater), and finally appends the patched original statements.
     *
     * @param regressionTable    the PMML table being translated
     * @param compilationDTO     model-level settings
     * @param staticGetterMethod the template method whose body is replaced
     * @param variableName       base name used for generated variables and the table name
     */
    static void setStaticGetter(final RegressionTable regressionTable,
                                final RegressionCompilationDTO compilationDTO,
                                final MethodDeclaration staticGetterMethod,
                                final String variableName) {
        final BlockStmt regressionTableBody =
                staticGetterMethod.getBody().orElseThrow(() -> new KiePMMLException(String.format(MISSING_BODY_TEMPLATE, staticGetterMethod)));
        final BlockStmt newBody = new BlockStmt();
        // populate maps
        String numericFunctionMapName = String.format(VARIABLE_NAME_TEMPLATE, NUMERIC_FUNCTION_MAP, variableName);
        final Map<String, Expression> numericPredictorsMap =
                getNumericPredictorsExpressions(regressionTable.getNumericPredictors());
        createPopulatedHashMap(newBody, numericFunctionMapName,
                               Arrays.asList(String.class.getSimpleName(), "SerializableFunction<Double, Double>"),
                               numericPredictorsMap);
        final Map<String, Expression> categoricalPredictorFunctionsMap =
                getCategoricalPredictorsExpressions(regressionTable.getCategoricalPredictors(), newBody, variableName);
        String categoricalFunctionMapName = String.format(VARIABLE_NAME_TEMPLATE, CATEGORICAL_FUNCTION_MAP, variableName);
        createPopulatedHashMap(newBody, categoricalFunctionMapName,
                               Arrays.asList(String.class.getSimpleName(), "SerializableFunction<String, " + "Double>"),
                               categoricalPredictorFunctionsMap);
        String predictorTermsFunctionMapName = String.format(VARIABLE_NAME_TEMPLATE, PREDICTOR_TERM_FUNCTION_MAP, variableName);
        final Map<String, Expression> predictorTermsMap = getPredictorTermFunctions(regressionTable.getPredictorTerms());
        createPopulatedHashMap(newBody, predictorTermsFunctionMapName,
                               Arrays.asList(String.class.getSimpleName(), "SerializableFunction<Map" + "<String, " + "Object>, Double>"),
                               predictorTermsMap);
        final VariableDeclarator variableDeclarator =
                getVariableDeclarator(regressionTableBody, TO_RETURN).orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_IN_BODY, TO_RETURN, regressionTableBody)));
        final MethodCallExpr initializer = variableDeclarator.getInitializer()
                .orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_INITIALIZER_TEMPLATE, TO_RETURN, regressionTableBody)))
                .asMethodCallExpr();
        final MethodCallExpr builder = getChainedMethodCallExprFrom("builder", initializer);
        builder.setArgument(0, new StringLiteralExpr(variableName));
        getChainedMethodCallExprFrom("withNumericFunctionMap", initializer).setArgument(0, new NameExpr(numericFunctionMapName) {
        });
        getChainedMethodCallExprFrom("withCategoricalFunctionMap", initializer).setArgument(0, new NameExpr(categoricalFunctionMapName));
        getChainedMethodCallExprFrom("withPredictorTermsFunctionMap", initializer).setArgument(0, new NameExpr(predictorTermsFunctionMapName));
        // NOTE(review): unlike getRegressionTable, a null intercept is NOT guarded here —
        // NPE if the PMML table omits it; confirm intercept is mandatory on this path.
        getChainedMethodCallExprFrom("withIntercept", initializer).setArgument(0, getExpressionForObject(regressionTable.getIntercept().doubleValue()));
        getChainedMethodCallExprFrom("withTargetField", initializer).setArgument(0, getExpressionForObject(compilationDTO.getTargetFieldName()));
        getChainedMethodCallExprFrom("withTargetCategory", initializer).setArgument(0, getExpressionForObject(regressionTable.getTargetCategory()));
        final Expression resultUpdaterExpression = getResultUpdaterExpression(compilationDTO.getDefaultNormalizationMethod());
        getChainedMethodCallExprFrom("withResultUpdater", initializer).setArgument(0, resultUpdaterExpression);
        // Map declarations first, then the (patched) original template statements.
        regressionTableBody.getStatements().forEach(newBody::addStatement);
        staticGetterMethod.setBody(newBody);
    }

    /**
     * Returns the AST expression for the result updater: a cast method-reference for
     * supported normalization methods, a null literal otherwise.
     */
    static Expression getResultUpdaterExpression(final RegressionModel.NormalizationMethod normalizationMethod) {
        if (UNSUPPORTED_NORMALIZATION_METHODS.contains(normalizationMethod)) {
            return new NullLiteralExpr();
        } else {
            return getResultUpdaterSupportedExpression(normalizationMethod);
        }
    }

    /**
     * Creates a <b>resultUpdater</b> method-reference expression, e.g.
     * {@code (SerializableFunction<Double, Double>) KiePMMLRegressionTable::updateSOFTMAXResult}.
     *
     * @param normalizationMethod a supported normalization method
     * @return the cast method-reference expression
     */
    static MethodReferenceExpr getResultUpdaterSupportedExpression(final RegressionModel.NormalizationMethod normalizationMethod) {
        final String thisExpressionMethodName = String.format("update%sResult", normalizationMethod.name());
        final CastExpr castExpr = new CastExpr();
        final String doubleClassName = Double.class.getSimpleName();
        final ClassOrInterfaceType consumerType =
                getTypedClassOrInterfaceTypeByTypeNames(SerializableFunction.class.getCanonicalName(),
                                                        Arrays.asList(doubleClassName, doubleClassName));
        castExpr.setType(consumerType);
        // Presumably resolves JavaParser's String overload of setExpression (wraps the
        // name in a NameExpr) — verify against the JavaParser version in use.
        castExpr.setExpression(KiePMMLRegressionTable.class.getSimpleName());
        final MethodReferenceExpr toReturn = new MethodReferenceExpr();
        toReturn.setScope(castExpr);
        toReturn.setIdentifier(thisExpressionMethodName);
        return toReturn;
    }

    /**
     * Creates the <b>NumericPredictor</b>s <code>CastExpr</code> map, keyed by field name.
     *
     * @param numericPredictors the PMML numeric predictors
     * @return map of field name to cast lambda expression
     */
    static Map<String, Expression> getNumericPredictorsExpressions(final List<NumericPredictor> numericPredictors) {
        return numericPredictors.stream()
                .collect(Collectors.toMap(numericPredictor -> numericPredictor.getField(),
                                          KiePMMLRegressionTableFactory::getNumericPredictorExpression));
    }

    /**
     * Creates a <b>NumericPredictor</b> <code>CastExpr</code> of the form
     * {@code (SerializableFunction<Double, Double>) input ->
     * KiePMMLRegressionTable.evaluateNumericWith[out]Exponent(input, coefficient[, exponent])}.
     *
     * @param numericPredictor the PMML numeric predictor
     * @return the cast lambda expression
     */
    static CastExpr getNumericPredictorExpression(final NumericPredictor numericPredictor) {
        // Same boxed-comparison caveat as getNumericPredictorEntry.
        boolean withExponent = !Objects.equals(1, numericPredictor.getExponent());
        final String lambdaExpressionMethodName = withExponent ?
                "evaluateNumericWithExponent" : "evaluateNumericWithoutExponent";
        final String parameterName = "input";
        final MethodCallExpr lambdaMethodCallExpr = new MethodCallExpr();
        lambdaMethodCallExpr.setName(lambdaExpressionMethodName);
        lambdaMethodCallExpr.setScope(new NameExpr(KiePMMLRegressionTable.class.getSimpleName()));
        final NodeList<Expression> arguments = new NodeList<>();
        arguments.add(0, new NameExpr(parameterName));
        arguments.add(1, getExpressionForObject(numericPredictor.getCoefficient().doubleValue()));
        if (withExponent) {
            arguments.add(2, getExpressionForObject(numericPredictor.getExponent().doubleValue()));
        }
        lambdaMethodCallExpr.setArguments(arguments);
        final ExpressionStmt lambdaExpressionStmt = new ExpressionStmt(lambdaMethodCallExpr);
        final LambdaExpr lambdaExpr = new LambdaExpr();
        final Parameter lambdaParameter = new Parameter(new UnknownType(), parameterName);
        lambdaExpr.setParameters(NodeList.nodeList(lambdaParameter));
        lambdaExpr.setBody(lambdaExpressionStmt);
        final String doubleClassName = Double.class.getSimpleName();
        final ClassOrInterfaceType serializableFunctionType =
                getTypedClassOrInterfaceTypeByTypeNames(SerializableFunction.class.getCanonicalName(),
                                                        Arrays.asList(doubleClassName, doubleClassName));
        final CastExpr toReturn = new CastExpr();
        toReturn.setType(serializableFunctionType);
        toReturn.setExpression(lambdaExpr);
        return toReturn;
    }

    /**
     * Creates the <b>CategoricalPredictor</b>s lambda <code>Expression</code>s map.
     *
     * @param categoricalPredictors the PMML categorical predictors
     * @param body                  block receiving the per-field grouped-map declarations
     * @param variableName          base name for generated variables
     * @return map of field name to lambda expression
     */
    static Map<String, Expression> getCategoricalPredictorsExpressions(final List<CategoricalPredictor> categoricalPredictors,
                                                                       final BlockStmt body,
                                                                       final String variableName) {
        final Map<String, List<CategoricalPredictor>> groupedCollectors = categoricalPredictors.stream()
                .collect(groupingBy(categoricalPredictor -> categoricalPredictor.getField()));
        final String categoricalPredictorMapNameBase = getSanitizedVariableName(String.format("%sMap", variableName));
        final
AtomicInteger counter = new AtomicInteger(); return groupedCollectors.entrySet().stream() .map(entry -> { final String categoricalPredictorMapName = String.format(VARIABLE_NAME_TEMPLATE, categoricalPredictorMapNameBase, counter.getAndIncrement()); populateWithGroupedCategoricalPredictorMap(entry.getValue(), body, categoricalPredictorMapName); return new AbstractMap.SimpleEntry<>(entry.getKey(), getCategoricalPredictorExpression(categoricalPredictorMapName)); }) .collect(Collectors.toMap(AbstractMap.SimpleEntry::getKey, AbstractMap.SimpleEntry::getValue)); } /** * Populate the given <b>body</b> with the creation of a <code>Map</code> for the given <b>categoricalPredictors</b> * * @param categoricalPredictors * @param toPopulate * @param categoricalPredictorMapName * @return */ static void populateWithGroupedCategoricalPredictorMap(final List<CategoricalPredictor> categoricalPredictors, final BlockStmt toPopulate, final String categoricalPredictorMapName) { final VariableDeclarator categoricalMapDeclarator = new VariableDeclarator(getTypedClassOrInterfaceTypeByTypeNames(Map.class.getName(), Arrays.asList(String.class.getSimpleName(), Double.class.getSimpleName())), categoricalPredictorMapName); final ObjectCreationExpr categoricalMapInitializer = new ObjectCreationExpr(); categoricalMapInitializer.setType(getTypedClassOrInterfaceTypeByTypeNames(HashMap.class.getName(), Arrays.asList(String.class.getSimpleName(), Double.class.getSimpleName()))); categoricalMapDeclarator.setInitializer(categoricalMapInitializer); final VariableDeclarationExpr categoricalMapDeclarationExpr = new VariableDeclarationExpr(categoricalMapDeclarator); toPopulate.addStatement(categoricalMapDeclarationExpr); final Map<String, Expression> mapExpressions = new LinkedHashMap<>(); categoricalPredictors.forEach(categoricalPredictor -> mapExpressions.put(categoricalPredictor.getValue().toString(), getExpressionForObject(categoricalPredictor.getCoefficient().doubleValue()))); 
addMapPopulationExpressions(mapExpressions, toPopulate, categoricalPredictorMapName); } /** * Create <b>CategoricalPredictor</b> <code>CastExpr</code> to the class * * @param categoricalPredictorMapName * @return */ static CastExpr getCategoricalPredictorExpression(final String categoricalPredictorMapName) { final String lambdaExpressionMethodName = "evaluateCategoricalPredictor"; final String parameterName = "input"; final MethodCallExpr lambdaMethodCallExpr = new MethodCallExpr(); lambdaMethodCallExpr.setName(lambdaExpressionMethodName); final NodeList<Expression> arguments = new NodeList<>(); arguments.add(0, new NameExpr(parameterName)); arguments.add(1, new NameExpr(categoricalPredictorMapName)); lambdaMethodCallExpr.setArguments(arguments); final ExpressionStmt lambdaExpressionStmt = new ExpressionStmt(lambdaMethodCallExpr); final LambdaExpr lambdaExpr = new LambdaExpr(); final Parameter lambdaParameter = new Parameter(new UnknownType(), parameterName); lambdaExpr.setParameters(NodeList.nodeList(lambdaParameter)); lambdaExpr.setBody(lambdaExpressionStmt); lambdaMethodCallExpr.setScope(new NameExpr(KiePMMLRegressionTable.class.getSimpleName())); final ClassOrInterfaceType serializableFunctionType = getTypedClassOrInterfaceTypeByTypeNames(SerializableFunction.class.getCanonicalName(), Arrays.asList(String.class.getSimpleName(), Double.class.getSimpleName())); final CastExpr toReturn = new CastExpr(); toReturn.setType(serializableFunctionType); toReturn.setExpression(lambdaExpr); return toReturn; } /** * Get the <code>Map</code> of <b>PredictorTerm</b>' <code>VariableDeclarationExpr</code>s * * @param predictorTerms * @return */ static Map<String, Expression> getPredictorTermFunctions(final List<PredictorTerm> predictorTerms) { predictorsArity.set(0); return predictorTerms.stream() .map(predictorTerm -> { int arity = predictorsArity.addAndGet(1); String variableName = predictorTerm.getName() != null ?predictorTerm.getName() : "predictorTermFunction" + arity; 
return new AbstractMap.SimpleEntry<>(variableName, getPredictorTermFunction(predictorTerm)); }) .collect(Collectors.toMap(AbstractMap.SimpleEntry::getKey, AbstractMap.SimpleEntry::getValue)); } /** * Get the <b>PredictorTerm</b> <code>VariableDeclarationExpr</code> * * @param predictorTerm * @return */ static LambdaExpr getPredictorTermFunction(final PredictorTerm predictorTerm) { try { LambdaExpr toReturn = new LambdaExpr(); toReturn.setParameters(NodeList.nodeList(new Parameter(new UnknownType(), "resultMap"))); final BlockStmt body = getPredictorTermBody(predictorTerm); toReturn.setBody(body); return toReturn; } catch (Exception e) { throw new KiePMMLInternalException(String.format("Failed to get PredictorTermFunction for %s", predictorTerm), e); } } /** * Add a <b>PredictorTerm</b> <code>MethodDeclaration</code> to the class * * @param predictorTerm * @return */ static BlockStmt getPredictorTermBody(final PredictorTerm predictorTerm) { try { templateEvaluate = getFromFileName(KIE_PMML_EVALUATE_METHOD_TEMPLATE_JAVA); cloneEvaluate = templateEvaluate.clone(); ClassOrInterfaceDeclaration evaluateTemplateClass = cloneEvaluate.getClassByName(KIE_PMML_EVALUATE_METHOD_TEMPLATE) .orElseThrow(() -> new RuntimeException(MAIN_CLASS_NOT_FOUND)); MethodDeclaration methodTemplate = evaluateTemplateClass.getMethodsByName("evaluatePredictor").get(0); final BlockStmt body = methodTemplate.getBody().orElseThrow(() -> new KiePMMLInternalException(String.format(MISSING_BODY_TEMPLATE, methodTemplate.getName()))); VariableDeclarator variableDeclarator = getVariableDeclarator(body, "fieldRefs") .orElseThrow(() -> new KiePMMLInternalException(String.format(MISSING_VARIABLE_IN_BODY, "fieldRefs", body))); final List<Expression> nodeList = predictorTerm.getFieldRefs().stream() .map(fieldRef -> new StringLiteralExpr(fieldRef.getField())) .collect(Collectors.toList()); NodeList<Expression> expressions = NodeList.nodeList(nodeList); MethodCallExpr methodCallExpr = new MethodCallExpr(new 
NameExpr("Arrays"), "asList", expressions); variableDeclarator.setInitializer(methodCallExpr); variableDeclarator = getVariableDeclarator(body, COEFFICIENT) .orElseThrow(() -> new KiePMMLInternalException(String.format(MISSING_VARIABLE_IN_BODY, COEFFICIENT, body))); variableDeclarator.setInitializer(String.valueOf(predictorTerm.getCoefficient().doubleValue())); return methodTemplate.getBody().orElseThrow(() -> new KiePMMLInternalException(String.format(MISSING_BODY_TEMPLATE, methodTemplate.getName()))); } catch (Exception e) { throw new KiePMMLInternalException(String.format("Failed to add PredictorTerm %s", predictorTerm), e); } } }
googleapis/google-cloud-java
37,197
java-discoveryengine/proto-google-cloud-discoveryengine-v1/src/main/java/com/google/cloud/discoveryengine/v1/WriteUserEventRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/discoveryengine/v1/user_event_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.discoveryengine.v1; /** * * * <pre> * Request message for WriteUserEvent method. * </pre> * * Protobuf type {@code google.cloud.discoveryengine.v1.WriteUserEventRequest} */ public final class WriteUserEventRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1.WriteUserEventRequest) WriteUserEventRequestOrBuilder { private static final long serialVersionUID = 0L; // Use WriteUserEventRequest.newBuilder() to construct. 
private WriteUserEventRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private WriteUserEventRequest() { parent_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new WriteUserEventRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.discoveryengine.v1.UserEventServiceProto .internal_static_google_cloud_discoveryengine_v1_WriteUserEventRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.discoveryengine.v1.UserEventServiceProto .internal_static_google_cloud_discoveryengine_v1_WriteUserEventRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.discoveryengine.v1.WriteUserEventRequest.class, com.google.cloud.discoveryengine.v1.WriteUserEventRequest.Builder.class); } private int bitField0_; public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The parent resource name. * If the write user event action is applied in * [DataStore][google.cloud.discoveryengine.v1.DataStore] level, the format * is: * `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}`. * If the write user event action is applied in * [Location][google.cloud.location.Location] level, for example, the event * with [Document][google.cloud.discoveryengine.v1.Document] across multiple * [DataStore][google.cloud.discoveryengine.v1.DataStore], the format is: * `projects/{project}/locations/{location}`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. 
*/ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The parent resource name. * If the write user event action is applied in * [DataStore][google.cloud.discoveryengine.v1.DataStore] level, the format * is: * `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}`. * If the write user event action is applied in * [Location][google.cloud.location.Location] level, for example, the event * with [Document][google.cloud.discoveryengine.v1.Document] across multiple * [DataStore][google.cloud.discoveryengine.v1.DataStore], the format is: * `projects/{project}/locations/{location}`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int USER_EVENT_FIELD_NUMBER = 2; private com.google.cloud.discoveryengine.v1.UserEvent userEvent_; /** * * * <pre> * Required. User event to write. * </pre> * * <code> * optional .google.cloud.discoveryengine.v1.UserEvent user_event = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the userEvent field is set. */ @java.lang.Override public boolean hasUserEvent() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. User event to write. 
* </pre> * * <code> * optional .google.cloud.discoveryengine.v1.UserEvent user_event = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The userEvent. */ @java.lang.Override public com.google.cloud.discoveryengine.v1.UserEvent getUserEvent() { return userEvent_ == null ? com.google.cloud.discoveryengine.v1.UserEvent.getDefaultInstance() : userEvent_; } /** * * * <pre> * Required. User event to write. * </pre> * * <code> * optional .google.cloud.discoveryengine.v1.UserEvent user_event = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.discoveryengine.v1.UserEventOrBuilder getUserEventOrBuilder() { return userEvent_ == null ? com.google.cloud.discoveryengine.v1.UserEvent.getDefaultInstance() : userEvent_; } public static final int WRITE_ASYNC_FIELD_NUMBER = 3; private boolean writeAsync_ = false; /** * * * <pre> * If set to true, the user event is written asynchronously after * validation, and the API responds without waiting for the write. * </pre> * * <code>bool write_async = 3;</code> * * @return The writeAsync. 
*/ @java.lang.Override public boolean getWriteAsync() { return writeAsync_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getUserEvent()); } if (writeAsync_ != false) { output.writeBool(3, writeAsync_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUserEvent()); } if (writeAsync_ != false) { size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, writeAsync_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.discoveryengine.v1.WriteUserEventRequest)) { return super.equals(obj); } com.google.cloud.discoveryengine.v1.WriteUserEventRequest other = (com.google.cloud.discoveryengine.v1.WriteUserEventRequest) obj; if (!getParent().equals(other.getParent())) return false; if (hasUserEvent() != other.hasUserEvent()) return false; if (hasUserEvent()) { if (!getUserEvent().equals(other.getUserEvent())) return false; } if (getWriteAsync() != other.getWriteAsync()) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); if (hasUserEvent()) { hash = (37 * hash) + USER_EVENT_FIELD_NUMBER; hash = (53 * hash) + getUserEvent().hashCode(); } hash = (37 * hash) + WRITE_ASYNC_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getWriteAsync()); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.discoveryengine.v1.WriteUserEventRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1.WriteUserEventRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1.WriteUserEventRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1.WriteUserEventRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1.WriteUserEventRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1.WriteUserEventRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1.WriteUserEventRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1.WriteUserEventRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.discoveryengine.v1.WriteUserEventRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1.WriteUserEventRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.discoveryengine.v1.WriteUserEventRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1.WriteUserEventRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( 
com.google.cloud.discoveryengine.v1.WriteUserEventRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for WriteUserEvent method. * </pre> * * Protobuf type {@code google.cloud.discoveryengine.v1.WriteUserEventRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1.WriteUserEventRequest) com.google.cloud.discoveryengine.v1.WriteUserEventRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.discoveryengine.v1.UserEventServiceProto .internal_static_google_cloud_discoveryengine_v1_WriteUserEventRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.discoveryengine.v1.UserEventServiceProto .internal_static_google_cloud_discoveryengine_v1_WriteUserEventRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.discoveryengine.v1.WriteUserEventRequest.class, com.google.cloud.discoveryengine.v1.WriteUserEventRequest.Builder.class); } // Construct using com.google.cloud.discoveryengine.v1.WriteUserEventRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getUserEventFieldBuilder(); } } 
@java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; userEvent_ = null; if (userEventBuilder_ != null) { userEventBuilder_.dispose(); userEventBuilder_ = null; } writeAsync_ = false; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.discoveryengine.v1.UserEventServiceProto .internal_static_google_cloud_discoveryengine_v1_WriteUserEventRequest_descriptor; } @java.lang.Override public com.google.cloud.discoveryengine.v1.WriteUserEventRequest getDefaultInstanceForType() { return com.google.cloud.discoveryengine.v1.WriteUserEventRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.discoveryengine.v1.WriteUserEventRequest build() { com.google.cloud.discoveryengine.v1.WriteUserEventRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.discoveryengine.v1.WriteUserEventRequest buildPartial() { com.google.cloud.discoveryengine.v1.WriteUserEventRequest result = new com.google.cloud.discoveryengine.v1.WriteUserEventRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.discoveryengine.v1.WriteUserEventRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.userEvent_ = userEventBuilder_ == null ? 
userEvent_ : userEventBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.writeAsync_ = writeAsync_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.discoveryengine.v1.WriteUserEventRequest) { return mergeFrom((com.google.cloud.discoveryengine.v1.WriteUserEventRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.discoveryengine.v1.WriteUserEventRequest other) { if (other == com.google.cloud.discoveryengine.v1.WriteUserEventRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasUserEvent()) { mergeUserEvent(other.getUserEvent()); } if (other.getWriteAsync() != false) { setWriteAsync(other.getWriteAsync()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override 
public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUserEventFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 24: { writeAsync_ = input.readBool(); bitField0_ |= 0x00000004; break; } // case 24 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The parent resource name. * If the write user event action is applied in * [DataStore][google.cloud.discoveryengine.v1.DataStore] level, the format * is: * `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}`. * If the write user event action is applied in * [Location][google.cloud.location.Location] level, for example, the event * with [Document][google.cloud.discoveryengine.v1.Document] across multiple * [DataStore][google.cloud.discoveryengine.v1.DataStore], the format is: * `projects/{project}/locations/{location}`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. 
*/ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The parent resource name. * If the write user event action is applied in * [DataStore][google.cloud.discoveryengine.v1.DataStore] level, the format * is: * `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}`. * If the write user event action is applied in * [Location][google.cloud.location.Location] level, for example, the event * with [Document][google.cloud.discoveryengine.v1.Document] across multiple * [DataStore][google.cloud.discoveryengine.v1.DataStore], the format is: * `projects/{project}/locations/{location}`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The parent resource name. * If the write user event action is applied in * [DataStore][google.cloud.discoveryengine.v1.DataStore] level, the format * is: * `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}`. * If the write user event action is applied in * [Location][google.cloud.location.Location] level, for example, the event * with [Document][google.cloud.discoveryengine.v1.Document] across multiple * [DataStore][google.cloud.discoveryengine.v1.DataStore], the format is: * `projects/{project}/locations/{location}`. 
* </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The parent resource name. * If the write user event action is applied in * [DataStore][google.cloud.discoveryengine.v1.DataStore] level, the format * is: * `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}`. * If the write user event action is applied in * [Location][google.cloud.location.Location] level, for example, the event * with [Document][google.cloud.discoveryengine.v1.Document] across multiple * [DataStore][google.cloud.discoveryengine.v1.DataStore], the format is: * `projects/{project}/locations/{location}`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The parent resource name. * If the write user event action is applied in * [DataStore][google.cloud.discoveryengine.v1.DataStore] level, the format * is: * `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}`. * If the write user event action is applied in * [Location][google.cloud.location.Location] level, for example, the event * with [Document][google.cloud.discoveryengine.v1.Document] across multiple * [DataStore][google.cloud.discoveryengine.v1.DataStore], the format is: * `projects/{project}/locations/{location}`. 
* </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.cloud.discoveryengine.v1.UserEvent userEvent_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.discoveryengine.v1.UserEvent, com.google.cloud.discoveryengine.v1.UserEvent.Builder, com.google.cloud.discoveryengine.v1.UserEventOrBuilder> userEventBuilder_; /** * * * <pre> * Required. User event to write. * </pre> * * <code> * optional .google.cloud.discoveryengine.v1.UserEvent user_event = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the userEvent field is set. */ public boolean hasUserEvent() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. User event to write. * </pre> * * <code> * optional .google.cloud.discoveryengine.v1.UserEvent user_event = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The userEvent. */ public com.google.cloud.discoveryengine.v1.UserEvent getUserEvent() { if (userEventBuilder_ == null) { return userEvent_ == null ? com.google.cloud.discoveryengine.v1.UserEvent.getDefaultInstance() : userEvent_; } else { return userEventBuilder_.getMessage(); } } /** * * * <pre> * Required. User event to write. 
* </pre> * * <code> * optional .google.cloud.discoveryengine.v1.UserEvent user_event = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUserEvent(com.google.cloud.discoveryengine.v1.UserEvent value) { if (userEventBuilder_ == null) { if (value == null) { throw new NullPointerException(); } userEvent_ = value; } else { userEventBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. User event to write. * </pre> * * <code> * optional .google.cloud.discoveryengine.v1.UserEvent user_event = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUserEvent( com.google.cloud.discoveryengine.v1.UserEvent.Builder builderForValue) { if (userEventBuilder_ == null) { userEvent_ = builderForValue.build(); } else { userEventBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. User event to write. * </pre> * * <code> * optional .google.cloud.discoveryengine.v1.UserEvent user_event = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeUserEvent(com.google.cloud.discoveryengine.v1.UserEvent value) { if (userEventBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && userEvent_ != null && userEvent_ != com.google.cloud.discoveryengine.v1.UserEvent.getDefaultInstance()) { getUserEventBuilder().mergeFrom(value); } else { userEvent_ = value; } } else { userEventBuilder_.mergeFrom(value); } if (userEvent_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. User event to write. 
* </pre> * * <code> * optional .google.cloud.discoveryengine.v1.UserEvent user_event = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearUserEvent() { bitField0_ = (bitField0_ & ~0x00000002); userEvent_ = null; if (userEventBuilder_ != null) { userEventBuilder_.dispose(); userEventBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. User event to write. * </pre> * * <code> * optional .google.cloud.discoveryengine.v1.UserEvent user_event = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.discoveryengine.v1.UserEvent.Builder getUserEventBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUserEventFieldBuilder().getBuilder(); } /** * * * <pre> * Required. User event to write. * </pre> * * <code> * optional .google.cloud.discoveryengine.v1.UserEvent user_event = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.discoveryengine.v1.UserEventOrBuilder getUserEventOrBuilder() { if (userEventBuilder_ != null) { return userEventBuilder_.getMessageOrBuilder(); } else { return userEvent_ == null ? com.google.cloud.discoveryengine.v1.UserEvent.getDefaultInstance() : userEvent_; } } /** * * * <pre> * Required. User event to write. 
* </pre> * * <code> * optional .google.cloud.discoveryengine.v1.UserEvent user_event = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.discoveryengine.v1.UserEvent, com.google.cloud.discoveryengine.v1.UserEvent.Builder, com.google.cloud.discoveryengine.v1.UserEventOrBuilder> getUserEventFieldBuilder() { if (userEventBuilder_ == null) { userEventBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.discoveryengine.v1.UserEvent, com.google.cloud.discoveryengine.v1.UserEvent.Builder, com.google.cloud.discoveryengine.v1.UserEventOrBuilder>( getUserEvent(), getParentForChildren(), isClean()); userEvent_ = null; } return userEventBuilder_; } private boolean writeAsync_; /** * * * <pre> * If set to true, the user event is written asynchronously after * validation, and the API responds without waiting for the write. * </pre> * * <code>bool write_async = 3;</code> * * @return The writeAsync. */ @java.lang.Override public boolean getWriteAsync() { return writeAsync_; } /** * * * <pre> * If set to true, the user event is written asynchronously after * validation, and the API responds without waiting for the write. * </pre> * * <code>bool write_async = 3;</code> * * @param value The writeAsync to set. * @return This builder for chaining. */ public Builder setWriteAsync(boolean value) { writeAsync_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * If set to true, the user event is written asynchronously after * validation, and the API responds without waiting for the write. * </pre> * * <code>bool write_async = 3;</code> * * @return This builder for chaining. 
*/ public Builder clearWriteAsync() { bitField0_ = (bitField0_ & ~0x00000004); writeAsync_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1.WriteUserEventRequest) } // @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1.WriteUserEventRequest) private static final com.google.cloud.discoveryengine.v1.WriteUserEventRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1.WriteUserEventRequest(); } public static com.google.cloud.discoveryengine.v1.WriteUserEventRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<WriteUserEventRequest> PARSER = new com.google.protobuf.AbstractParser<WriteUserEventRequest>() { @java.lang.Override public WriteUserEventRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<WriteUserEventRequest> parser() { return PARSER; } @java.lang.Override 
public com.google.protobuf.Parser<WriteUserEventRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.discoveryengine.v1.WriteUserEventRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/nifi
37,042
nifi-registry/nifi-registry-core/nifi-registry-framework/src/main/java/org/apache/nifi/registry/service/extension/docs/HtmlExtensionDocWriter.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.registry.service.extension.docs; import org.apache.commons.lang3.StringUtils; import org.apache.nifi.extension.manifest.AllowableValue; import org.apache.nifi.extension.manifest.ControllerServiceDefinition; import org.apache.nifi.extension.manifest.DeprecationNotice; import org.apache.nifi.extension.manifest.DynamicProperty; import org.apache.nifi.extension.manifest.ExpressionLanguageScope; import org.apache.nifi.extension.manifest.Extension; import org.apache.nifi.extension.manifest.InputRequirement; import org.apache.nifi.extension.manifest.Property; import org.apache.nifi.extension.manifest.ProvidedServiceAPI; import org.apache.nifi.extension.manifest.Restricted; import org.apache.nifi.extension.manifest.Restriction; import org.apache.nifi.extension.manifest.Stateful; import org.apache.nifi.extension.manifest.SystemResourceConsideration; import org.apache.nifi.registry.extension.bundle.BundleInfo; import org.apache.nifi.registry.extension.component.ExtensionMetadata; import org.springframework.stereotype.Service; import javax.xml.stream.FactoryConfigurationError; import javax.xml.stream.XMLOutputFactory; import javax.xml.stream.XMLStreamException; import 
javax.xml.stream.XMLStreamWriter; import java.io.IOException; import java.io.OutputStream; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; import java.util.stream.Stream; import static org.apache.nifi.registry.service.extension.docs.DocumentationConstants.CSS_PATH; @Service public class HtmlExtensionDocWriter implements ExtensionDocWriter { @Override public void write(final ExtensionMetadata extensionMetadata, final Extension extension, final OutputStream outputStream) throws IOException { try { final XMLStreamWriter xmlStreamWriter = XMLOutputFactory.newInstance().createXMLStreamWriter(outputStream, "UTF-8"); xmlStreamWriter.writeDTD("<!DOCTYPE html>"); xmlStreamWriter.writeStartElement("html"); xmlStreamWriter.writeAttribute("lang", "en"); writeHead(extensionMetadata, xmlStreamWriter); writeBody(extensionMetadata, extension, xmlStreamWriter); xmlStreamWriter.writeEndElement(); xmlStreamWriter.close(); outputStream.flush(); } catch (XMLStreamException | FactoryConfigurationError e) { throw new IOException("Unable to create XMLOutputStream", e); } } private void writeHead(final ExtensionMetadata extensionMetadata, final XMLStreamWriter xmlStreamWriter) throws XMLStreamException { xmlStreamWriter.writeStartElement("head"); xmlStreamWriter.writeStartElement("meta"); xmlStreamWriter.writeAttribute("charset", "utf-8"); xmlStreamWriter.writeEndElement(); writeSimpleElement(xmlStreamWriter, "title", extensionMetadata.getDisplayName()); final String componentUsageCss = CSS_PATH + "component-usage.css"; xmlStreamWriter.writeStartElement("link"); xmlStreamWriter.writeAttribute("rel", "stylesheet"); xmlStreamWriter.writeAttribute("href", componentUsageCss); xmlStreamWriter.writeAttribute("type", "text/css"); xmlStreamWriter.writeEndElement(); xmlStreamWriter.writeEndElement(); xmlStreamWriter.writeStartElement("script"); xmlStreamWriter.writeAttribute("type", "text/javascript"); xmlStreamWriter.writeCharacters("window.onload = 
function(){if(self==top) { " + "document.getElementById('nameHeader').style.display = \"inherit\"; } }" ); xmlStreamWriter.writeEndElement(); } private void writeBody(final ExtensionMetadata extensionMetadata, final Extension extension, final XMLStreamWriter xmlStreamWriter) throws XMLStreamException { xmlStreamWriter.writeStartElement("body"); writeHeader(extensionMetadata, extension, xmlStreamWriter); writeBundleInfo(extensionMetadata, xmlStreamWriter); writeDeprecationWarning(extension, xmlStreamWriter); writeDescription(extensionMetadata, extension, xmlStreamWriter); writeTags(extension, xmlStreamWriter); writeProperties(extension, xmlStreamWriter); writeDynamicProperties(extension, xmlStreamWriter); writeAdditionalBodyInfo(extension, xmlStreamWriter); writeStatefulInfo(extension, xmlStreamWriter); writeRestrictedInfo(extension, xmlStreamWriter); writeInputRequirementInfo(extension, xmlStreamWriter); writeSystemResourceConsiderationInfo(extension, xmlStreamWriter); writeProvidedServiceApis(extension, xmlStreamWriter); writeSeeAlso(extension, xmlStreamWriter); // end body xmlStreamWriter.writeEndElement(); } /** * This method may be overridden by sub classes to write additional * information to the body of the documentation. 
* * @param extension the component to describe * @param xmlStreamWriter the stream writer * @throws XMLStreamException thrown if there was a problem writing to the XML stream */ protected void writeAdditionalBodyInfo(final Extension extension, final XMLStreamWriter xmlStreamWriter) throws XMLStreamException { } private void writeHeader(final ExtensionMetadata extensionMetadata, final Extension extension, final XMLStreamWriter xmlStreamWriter) throws XMLStreamException { xmlStreamWriter.writeStartElement("h1"); xmlStreamWriter.writeAttribute("id", "nameHeader"); xmlStreamWriter.writeAttribute("style", "display: none;"); xmlStreamWriter.writeCharacters(extensionMetadata.getDisplayName()); xmlStreamWriter.writeEndElement(); } private void writeBundleInfoString(final ExtensionMetadata extensionMetadata, final XMLStreamWriter xmlStreamWriter) throws XMLStreamException { final BundleInfo bundleInfo = extensionMetadata.getBundleInfo(); final String bundleInfoText = bundleInfo.getGroupId() + "-" + bundleInfo.getArtifactId() + "-" + bundleInfo.getVersion(); xmlStreamWriter.writeStartElement("p"); xmlStreamWriter.writeStartElement("i"); xmlStreamWriter.writeCharacters(bundleInfoText); xmlStreamWriter.writeEndElement(); xmlStreamWriter.writeEndElement(); } private void writeBundleInfo(final ExtensionMetadata extensionMetadata, final XMLStreamWriter xmlStreamWriter) throws XMLStreamException { final BundleInfo bundleInfo = extensionMetadata.getBundleInfo(); final String extenstionType = switch (extensionMetadata.getType()) { case PROCESSOR -> "Processor"; case CONTROLLER_SERVICE -> "Controller Service"; case REPORTING_TASK -> "Reporting Task"; default -> throw new IllegalArgumentException("Unknown extension type: " + extensionMetadata.getType()); }; xmlStreamWriter.writeStartElement("table"); xmlStreamWriter.writeStartElement("tr"); writeSimpleElement(xmlStreamWriter, "th", "Extension Info"); writeSimpleElement(xmlStreamWriter, "th", "Value"); 
xmlStreamWriter.writeEndElement(); xmlStreamWriter.writeStartElement("tr"); writeSimpleElement(xmlStreamWriter, "td", "Full Name", true, "bundle-info"); writeSimpleElement(xmlStreamWriter, "td", extensionMetadata.getName()); xmlStreamWriter.writeEndElement(); xmlStreamWriter.writeStartElement("tr"); writeSimpleElement(xmlStreamWriter, "td", "Type", true, "bundle-info"); writeSimpleElement(xmlStreamWriter, "td", extenstionType); xmlStreamWriter.writeEndElement(); xmlStreamWriter.writeStartElement("tr"); writeSimpleElement(xmlStreamWriter, "td", "Bundle Group", true, "bundle-info"); writeSimpleElement(xmlStreamWriter, "td", bundleInfo.getGroupId()); xmlStreamWriter.writeEndElement(); xmlStreamWriter.writeStartElement("tr"); writeSimpleElement(xmlStreamWriter, "td", "Bundle Artifact", true, "bundle-info"); writeSimpleElement(xmlStreamWriter, "td", bundleInfo.getArtifactId()); xmlStreamWriter.writeEndElement(); xmlStreamWriter.writeStartElement("tr"); writeSimpleElement(xmlStreamWriter, "td", "Bundle Version", true, "bundle-info"); writeSimpleElement(xmlStreamWriter, "td", bundleInfo.getVersion()); xmlStreamWriter.writeEndElement(); xmlStreamWriter.writeStartElement("tr"); writeSimpleElement(xmlStreamWriter, "td", "Bundle Type", true, "bundle-info"); writeSimpleElement(xmlStreamWriter, "td", bundleInfo.getBundleType().toString()); xmlStreamWriter.writeEndElement(); xmlStreamWriter.writeStartElement("tr"); writeSimpleElement(xmlStreamWriter, "td", "System API Version", true, "bundle-info"); writeSimpleElement(xmlStreamWriter, "td", bundleInfo.getSystemApiVersion()); xmlStreamWriter.writeEndElement(); xmlStreamWriter.writeEndElement(); // end table } private void writeDeprecationWarning(final Extension extension, final XMLStreamWriter xmlStreamWriter) throws XMLStreamException { final DeprecationNotice deprecationNotice = extension.getDeprecationNotice(); if (deprecationNotice != null) { xmlStreamWriter.writeStartElement("h2"); 
xmlStreamWriter.writeCharacters("Deprecation notice: "); xmlStreamWriter.writeEndElement(); xmlStreamWriter.writeStartElement("p"); xmlStreamWriter.writeCharacters(""); if (!StringUtils.isEmpty(deprecationNotice.getReason())) { xmlStreamWriter.writeCharacters(deprecationNotice.getReason()); } else { xmlStreamWriter.writeCharacters("Please be aware this processor is deprecated and may be removed in the near future."); } xmlStreamWriter.writeEndElement(); xmlStreamWriter.writeStartElement("p"); xmlStreamWriter.writeCharacters("Please consider using one of the following alternatives: "); final List<String> alternatives = deprecationNotice.getAlternatives(); if (alternatives != null && !alternatives.isEmpty()) { xmlStreamWriter.writeStartElement("ul"); for (final String alternative : alternatives) { xmlStreamWriter.writeStartElement("li"); xmlStreamWriter.writeCharacters(alternative); xmlStreamWriter.writeEndElement(); } xmlStreamWriter.writeEndElement(); } else { xmlStreamWriter.writeCharacters("No alternative components suggested."); } xmlStreamWriter.writeEndElement(); } } private void writeDescription(final ExtensionMetadata extensionMetadata, final Extension extension, final XMLStreamWriter xmlStreamWriter) throws XMLStreamException { final String description = StringUtils.isBlank(extension.getDescription()) ? "No description provided." 
: extension.getDescription(); writeSimpleElement(xmlStreamWriter, "h2", "Description: "); writeSimpleElement(xmlStreamWriter, "p", description); if (extensionMetadata.getHasAdditionalDetails()) { xmlStreamWriter.writeStartElement("p"); final BundleInfo bundleInfo = extensionMetadata.getBundleInfo(); final String bucketName = bundleInfo.getBucketName(); final String groupId = bundleInfo.getGroupId(); final String artifactId = bundleInfo.getArtifactId(); final String version = bundleInfo.getVersion(); final String extensionName = extensionMetadata.getName(); final String additionalDetailsPath = "/nifi-registry-api/extension-repository/" + bucketName + "/" + groupId + "/" + artifactId + "/" + version + "/extensions/" + extensionName + "/docs/additional-details"; writeLink(xmlStreamWriter, "Additional Details...", additionalDetailsPath); xmlStreamWriter.writeEndElement(); } } private void writeTags(final Extension extension, final XMLStreamWriter xmlStreamWriter) throws XMLStreamException { final List<String> tags = extension.getTags(); xmlStreamWriter.writeStartElement("h3"); xmlStreamWriter.writeCharacters("Tags: "); xmlStreamWriter.writeEndElement(); xmlStreamWriter.writeStartElement("p"); if (tags != null) { final String tagString = StringUtils.join(tags, ", "); xmlStreamWriter.writeCharacters(tagString); } else { xmlStreamWriter.writeCharacters("No tags provided."); } xmlStreamWriter.writeEndElement(); } protected void writeProperties(final Extension extension, final XMLStreamWriter xmlStreamWriter) throws XMLStreamException { final List<Property> properties = extension.getProperties(); writeSimpleElement(xmlStreamWriter, "h3", "Properties: "); if (properties != null && !properties.isEmpty()) { final boolean containsExpressionLanguage = containsExpressionLanguage(extension); final boolean containsSensitiveProperties = containsSensitiveProperties(extension); xmlStreamWriter.writeStartElement("p"); xmlStreamWriter.writeCharacters("In the list below, the names of 
required properties appear in "); writeSimpleElement(xmlStreamWriter, "strong", "bold"); xmlStreamWriter.writeCharacters(". Any other properties (not in bold) are considered optional. " + "The table also indicates any default values"); if (containsExpressionLanguage) { if (!containsSensitiveProperties) { xmlStreamWriter.writeCharacters(", and "); } else { xmlStreamWriter.writeCharacters(", "); } xmlStreamWriter.writeCharacters("whether a property supports the NiFi Expression Language"); } if (containsSensitiveProperties) { xmlStreamWriter.writeCharacters(", and whether a property is considered " + "\"sensitive\", meaning that its value will be encrypted"); } xmlStreamWriter.writeCharacters("."); xmlStreamWriter.writeEndElement(); xmlStreamWriter.writeStartElement("table"); xmlStreamWriter.writeAttribute("id", "properties"); // write the header row xmlStreamWriter.writeStartElement("tr"); writeSimpleElement(xmlStreamWriter, "th", "Name"); writeSimpleElement(xmlStreamWriter, "th", "Default Value"); writeSimpleElement(xmlStreamWriter, "th", "Allowable Values"); writeSimpleElement(xmlStreamWriter, "th", "Description"); xmlStreamWriter.writeEndElement(); // write the individual properties for (Property property : properties) { xmlStreamWriter.writeStartElement("tr"); xmlStreamWriter.writeStartElement("td"); xmlStreamWriter.writeAttribute("id", "name"); if (property.isRequired()) { writeSimpleElement(xmlStreamWriter, "strong", property.getDisplayName()); } else { xmlStreamWriter.writeCharacters(property.getDisplayName()); } xmlStreamWriter.writeEndElement(); writeSimpleElement(xmlStreamWriter, "td", property.getDefaultValue(), false, "default-value"); xmlStreamWriter.writeStartElement("td"); xmlStreamWriter.writeAttribute("id", "allowable-values"); writeValidValues(xmlStreamWriter, property); xmlStreamWriter.writeEndElement(); xmlStreamWriter.writeStartElement("td"); xmlStreamWriter.writeAttribute("id", "description"); if (property.getDescription() != null && 
!property.getDescription().isBlank()) {
    xmlStreamWriter.writeCharacters(property.getDescription());
} else {
    // Placeholder keeps the table cell non-empty when no description was supplied.
    xmlStreamWriter.writeCharacters("No Description Provided.");
}

// Flag sensitive properties so users know the value will be protected at rest.
if (property.isSensitive()) {
    xmlStreamWriter.writeEmptyElement("br");
    writeSimpleElement(xmlStreamWriter, "strong", "Sensitive Property: true");
}

// Describe Expression Language support, qualified by the scope in which the
// expression will be evaluated.
if (property.isExpressionLanguageSupported()) {
    xmlStreamWriter.writeEmptyElement("br");
    StringBuilder text = new StringBuilder("Supports Expression Language: true");
    final String perFF = " (will be evaluated using flow file attributes and Environment variables)";
    final String registry = " (will be evaluated using Environment variables only)";
    final InputRequirement inputRequirement = extension.getInputRequirement();
    switch (property.getExpressionLanguageScope()) {
        case FLOWFILE_ATTRIBUTES:
            // A component that forbids incoming connections never sees flow files,
            // so only the environment-variable wording applies despite the scope.
            if (inputRequirement != null && inputRequirement.equals(InputRequirement.INPUT_FORBIDDEN)) {
                text.append(registry);
            } else {
                text.append(perFF);
            }
            break;
        case ENVIRONMENT:
            text.append(registry);
            break;
        case NONE:
            // in case legacy/deprecated method has been used to specify EL support
            // without declaring a scope
            text.append(" (undefined scope)");
            break;
    }
    writeSimpleElement(xmlStreamWriter, "strong", text.toString());
}
xmlStreamWriter.writeEndElement();
xmlStreamWriter.writeEndElement();
}
xmlStreamWriter.writeEndElement();
} else {
    writeSimpleElement(xmlStreamWriter, "p", "This component has no required or optional properties.");
}
}

/**
 * Returns true if at least one property of the given extension supports Expression Language.
 *
 * @param extension the extension whose properties are inspected
 * @return true when any property reports Expression Language support, false otherwise
 */
private boolean containsExpressionLanguage(final Extension extension) {
    for (Property property : extension.getProperties()) {
        if (property.isExpressionLanguageSupported()) {
            return true;
        }
    }
    return false;
}

/**
 * Returns true if at least one property of the given extension is marked sensitive.
 *
 * @param extension the extension whose properties are inspected
 * @return true when any property is sensitive, false otherwise
 */
private boolean containsSensitiveProperties(final Extension extension) {
    for (Property property : extension.getProperties()) {
        if (property.isSensitive()) {
            return true;
        }
    }
    return false;
}

/**
 * Writes the valid values for a property: either an unordered list of its allowable
 * values (with an info icon carrying each value's description), or — when the property
 * instead references a controller service — the service API class plus its bundle
 * coordinates. Writes nothing if the property has neither.
 *
 * @param xmlStreamWriter the stream writer to use
 * @param property the property whose valid values are rendered
 * @throws XMLStreamException thrown if there was a problem writing to the stream
 */
protected void writeValidValues(final XMLStreamWriter xmlStreamWriter, final Property property) throws XMLStreamException {
    if (property.getAllowableValues() != null && !property.getAllowableValues().isEmpty()) {
        xmlStreamWriter.writeStartElement("ul");
        for (AllowableValue value : property.getAllowableValues()) {
            xmlStreamWriter.writeStartElement("li");
            xmlStreamWriter.writeCharacters(value.getDisplayName());
            if (!StringUtils.isBlank(value.getDescription())) {
                writeValidValueDescription(xmlStreamWriter, value.getDescription());
            }
            xmlStreamWriter.writeEndElement();
        }
        xmlStreamWriter.writeEndElement();
    } else if (property.getControllerServiceDefinition() != null) {
        final ControllerServiceDefinition serviceDefinition = property.getControllerServiceDefinition();
        final String controllerServiceClass = getSimpleName(serviceDefinition.getClassName());
        // Bundle coordinates may be absent; fall back to "unknown" for each part.
        final String group = serviceDefinition.getGroupId() == null ? "unknown" : serviceDefinition.getGroupId();
        final String artifact = serviceDefinition.getArtifactId() == null ? "unknown" : serviceDefinition.getArtifactId();
        final String version = serviceDefinition.getVersion() == null ? "unknown" : serviceDefinition.getVersion();
        writeSimpleElement(xmlStreamWriter, "strong", "Controller Service API: ");
        xmlStreamWriter.writeEmptyElement("br");
        xmlStreamWriter.writeCharacters(controllerServiceClass);
        writeValidValueDescription(xmlStreamWriter, group + "-" + artifact + "-" + version);
    }
}

/**
 * Returns the simple (unqualified) class name from a fully qualified extension name,
 * i.e. the text after the last '.'; returns the input unchanged when it has no
 * package qualifier or ends with a '.'.
 *
 * @param extensionName the fully qualified name
 * @return the simple class name
 */
private String getSimpleName(final String extensionName) {
    int index = extensionName.lastIndexOf('.');
    if (index > 0 && (index < (extensionName.length() - 1))) {
        return extensionName.substring(index + 1);
    } else {
        return extensionName;
    }
}

/**
 * Writes an info icon whose alt/title text carries the given description, so the
 * description appears as a tooltip next to a valid value.
 *
 * @param xmlStreamWriter the stream writer to use
 * @param description the description shown as the icon's tooltip
 * @throws XMLStreamException thrown if there was a problem writing to the stream
 */
private void writeValidValueDescription(final XMLStreamWriter xmlStreamWriter, final String description) throws XMLStreamException {
    xmlStreamWriter.writeCharacters(" ");
    xmlStreamWriter.writeStartElement("img");
    xmlStreamWriter.writeAttribute("src", "/nifi-registry-docs/images/iconInfo.png");
    xmlStreamWriter.writeAttribute("alt", description);
    xmlStreamWriter.writeAttribute("title", description);
    xmlStreamWriter.writeEndElement();
}

/**
 * Writes the Dynamic Properties section: a table with one row per dynamic property
 * (name, value, description), each row also noting Expression Language support and
 * its evaluation scope. Writes nothing when the extension has no dynamic properties.
 *
 * @param extension the extension whose dynamic properties are rendered
 * @param xmlStreamWriter the stream writer to use
 * @throws XMLStreamException thrown if there was a problem writing to the stream
 */
private void writeDynamicProperties(final Extension extension, final XMLStreamWriter xmlStreamWriter) throws XMLStreamException {
    final List<DynamicProperty> dynamicProperties = extension.getDynamicProperties();
    if (dynamicProperties != null && !dynamicProperties.isEmpty()) {
        writeSimpleElement(xmlStreamWriter, "h3", "Dynamic Properties: ");
        xmlStreamWriter.writeStartElement("p");
        xmlStreamWriter.writeCharacters("Dynamic Properties allow the user to specify both the name and value of a property.");
        xmlStreamWriter.writeStartElement("table");
        xmlStreamWriter.writeAttribute("id", "dynamic-properties");
        xmlStreamWriter.writeStartElement("tr");
        writeSimpleElement(xmlStreamWriter, "th", "Name");
        writeSimpleElement(xmlStreamWriter, "th", "Value");
        writeSimpleElement(xmlStreamWriter, "th", "Description");
        xmlStreamWriter.writeEndElement();
        for (final DynamicProperty dynamicProperty : dynamicProperties) {
            // Substitute a placeholder for any blank field so no cell renders empty.
            final String name = StringUtils.isBlank(dynamicProperty.getName()) ? "Not Specified" : dynamicProperty.getName();
            final String value = StringUtils.isBlank(dynamicProperty.getValue()) ? "Not Specified" : dynamicProperty.getValue();
            final String description = StringUtils.isBlank(dynamicProperty.getDescription()) ? "Not Specified" : dynamicProperty.getDescription();
            xmlStreamWriter.writeStartElement("tr");
            writeSimpleElement(xmlStreamWriter, "td", name, false, "name");
            writeSimpleElement(xmlStreamWriter, "td", value, false, "value");
            xmlStreamWriter.writeStartElement("td");
            xmlStreamWriter.writeCharacters(description);
            xmlStreamWriter.writeEmptyElement("br");
            // Treat a missing scope as NONE so the checks below never NPE.
            final ExpressionLanguageScope elScope = dynamicProperty.getExpressionLanguageScope() == null ? ExpressionLanguageScope.NONE : dynamicProperty.getExpressionLanguageScope();
            String text;
            if (elScope.equals(ExpressionLanguageScope.NONE)) {
                // Scope NONE plus an explicit "supported" flag indicates a legacy
                // declaration that never specified a scope.
                if (dynamicProperty.isExpressionLanguageSupported()) {
                    text = "Supports Expression Language: true (undefined scope)";
                } else {
                    text = "Supports Expression Language: false";
                }
            } else {
                // NOTE(review): the "env/syst variables registry" wording below looks
                // stale relative to the "Environment variables" phrasing used for
                // static properties; left unchanged because it is rendered output.
                text = switch (elScope) {
                    case FLOWFILE_ATTRIBUTES -> "Supports Expression Language: true (will be evaluated using flow file attributes and env/syst variables registry)";
                    case ENVIRONMENT -> "Supports Expression Language: true (will be evaluated using env/syst variables registry only)";
                    default -> "Supports Expression Language: false";
                };
            }
            writeSimpleElement(xmlStreamWriter, "strong", text);
            xmlStreamWriter.writeEndElement();
            xmlStreamWriter.writeEndElement();
        }
        xmlStreamWriter.writeEndElement();
        xmlStreamWriter.writeEndElement();
    }
}

/**
 * Writes the State management section: a table of the stateful scopes and their
 * description when the extension declares state, or a sentence stating that the
 * component does not store state.
 *
 * @param extension the extension whose state management info is rendered
 * @param xmlStreamWriter the stream writer to use
 * @throws XMLStreamException thrown if there was a problem writing to the stream
 */
private void writeStatefulInfo(final Extension extension, final XMLStreamWriter xmlStreamWriter) throws XMLStreamException {
    final Stateful stateful = extension.getStateful();
    writeSimpleElement(xmlStreamWriter, "h3", "State management: ");
    if (stateful != null) {
        // Null-safe conversion of the scope enums to their string names.
        final List<String> scopes = Optional.ofNullable(stateful.getScopes())
                .map(List::stream)
                .orElseGet(Stream::empty)
                .map(s -> s.toString())
                .collect(Collectors.toList());
        final String description = StringUtils.isBlank(stateful.getDescription()) ? "Not Specified" : stateful.getDescription();
        xmlStreamWriter.writeStartElement("table");
        xmlStreamWriter.writeAttribute("id", "stateful");
        xmlStreamWriter.writeStartElement("tr");
        writeSimpleElement(xmlStreamWriter, "th", "Scope");
        writeSimpleElement(xmlStreamWriter, "th", "Description");
        xmlStreamWriter.writeEndElement();
        xmlStreamWriter.writeStartElement("tr");
        writeSimpleElement(xmlStreamWriter, "td", StringUtils.join(scopes, ", "));
        writeSimpleElement(xmlStreamWriter, "td", description);
        xmlStreamWriter.writeEndElement();
        xmlStreamWriter.writeEndElement();
    } else {
        xmlStreamWriter.writeCharacters("This component does not store state.");
    }
}

/**
 * Writes the Restricted section: the general restriction explanation (if any) and a
 * table of required permissions with their explanations; falls back to fixed text
 * when the extension has no restriction details or is not restricted at all.
 *
 * @param extension the extension whose restriction info is rendered
 * @param xmlStreamWriter the stream writer to use
 * @throws XMLStreamException thrown if there was a problem writing to the stream
 */
private void writeRestrictedInfo(final Extension extension, final XMLStreamWriter xmlStreamWriter) throws XMLStreamException {
    final Restricted restricted = extension.getRestricted();
    writeSimpleElement(xmlStreamWriter, "h3", "Restricted: ");
    if (restricted != null) {
        final String generalRestrictionExplanation = restricted.getGeneralRestrictionExplanation();
        if (!StringUtils.isBlank(generalRestrictionExplanation)) {
            xmlStreamWriter.writeCharacters(generalRestrictionExplanation);
        }
        final List<Restriction> restrictions = restricted.getRestrictions();
        if (restrictions != null && !restrictions.isEmpty()) {
            xmlStreamWriter.writeStartElement("table");
            xmlStreamWriter.writeAttribute("id", "restrictions");
            xmlStreamWriter.writeStartElement("tr");
            writeSimpleElement(xmlStreamWriter, "th", "Required Permission");
            writeSimpleElement(xmlStreamWriter, "th", "Explanation");
            xmlStreamWriter.writeEndElement();
            for (Restriction restriction : restrictions) {
                final String permission = StringUtils.isBlank(restriction.getRequiredPermission()) ? "Not Specified" : restriction.getRequiredPermission();
                final String explanation = StringUtils.isBlank(restriction.getExplanation()) ? "Not Specified" : restriction.getExplanation();
                xmlStreamWriter.writeStartElement("tr");
                writeSimpleElement(xmlStreamWriter, "td", permission);
                writeSimpleElement(xmlStreamWriter, "td", explanation);
                xmlStreamWriter.writeEndElement();
            }
            xmlStreamWriter.writeEndElement();
        } else {
            // Marked restricted but with no itemized permissions.
            xmlStreamWriter.writeCharacters("This component requires access to restricted components regardless of restriction.");
        }
    } else {
        xmlStreamWriter.writeCharacters("This component is not restricted.");
    }
}

/**
 * Writes the Input requirement section describing whether the component forbids,
 * allows, or requires an incoming relationship. Writes nothing (not even the
 * heading) when no input requirement is declared.
 *
 * @param extension the extension whose input requirement is rendered
 * @param xmlStreamWriter the stream writer to use
 * @throws XMLStreamException thrown if there was a problem writing to the stream
 */
private void writeInputRequirementInfo(final Extension extension, final XMLStreamWriter xmlStreamWriter) throws XMLStreamException {
    final InputRequirement inputRequirement = extension.getInputRequirement();
    if (inputRequirement != null) {
        writeSimpleElement(xmlStreamWriter, "h3", "Input requirement: ");
        switch (inputRequirement) {
            case INPUT_FORBIDDEN:
                xmlStreamWriter.writeCharacters("This component does not allow an incoming relationship.");
                break;
            case INPUT_ALLOWED:
                xmlStreamWriter.writeCharacters("This component allows an incoming relationship.");
                break;
            case INPUT_REQUIRED:
                xmlStreamWriter.writeCharacters("This component requires an incoming relationship.");
                break;
        }
    }
}

/**
 * Writes the System Resource Considerations section: a table of affected resources
 * and their descriptions, or "None specified." when the extension declares none.
 *
 * @param extension the extension whose system resource considerations are rendered
 * @param xmlStreamWriter the stream writer to use
 * @throws XMLStreamException thrown if there was a problem writing to the stream
 */
private void writeSystemResourceConsiderationInfo(final Extension extension, final XMLStreamWriter xmlStreamWriter) throws XMLStreamException {
    List<SystemResourceConsideration> systemResourceConsiderations = extension.getSystemResourceConsiderations();
    writeSimpleElement(xmlStreamWriter, "h3", "System Resource Considerations:");
    if (systemResourceConsiderations != null && !systemResourceConsiderations.isEmpty()) {
        xmlStreamWriter.writeStartElement("table");
        xmlStreamWriter.writeAttribute("id", "system-resource-considerations");
        xmlStreamWriter.writeStartElement("tr");
        writeSimpleElement(xmlStreamWriter, "th", "Resource");
        writeSimpleElement(xmlStreamWriter, "th", "Description");
        xmlStreamWriter.writeEndElement();
        for (SystemResourceConsideration systemResourceConsideration : systemResourceConsiderations) {
            final String resource = StringUtils.isBlank(systemResourceConsideration.getResource()) ? "Not Specified" : systemResourceConsideration.getResource();
            final String description = StringUtils.isBlank(systemResourceConsideration.getDescription()) ? "Not Specified" : systemResourceConsideration.getDescription();
            xmlStreamWriter.writeStartElement("tr");
            writeSimpleElement(xmlStreamWriter, "td", resource);
            writeSimpleElement(xmlStreamWriter, "td", description);
            xmlStreamWriter.writeEndElement();
        }
        xmlStreamWriter.writeEndElement();
    } else {
        xmlStreamWriter.writeCharacters("None specified.");
    }
}

/**
 * Writes the Provided Service APIs section: an unordered list of each provided
 * service API's simple class name followed by its bundle coordinates in italics.
 * Writes nothing when the extension provides no service APIs.
 *
 * @param extension the extension whose provided service APIs are rendered
 * @param xmlStreamWriter the stream writer to use
 * @throws XMLStreamException thrown if there was a problem writing to the stream
 */
private void writeProvidedServiceApis(final Extension extension, final XMLStreamWriter xmlStreamWriter) throws XMLStreamException {
    final List<ProvidedServiceAPI> serviceAPIS = extension.getProvidedServiceAPIs();
    if (serviceAPIS != null && !serviceAPIS.isEmpty()) {
        writeSimpleElement(xmlStreamWriter, "h3", "Provided Service APIs:");
        xmlStreamWriter.writeStartElement("ul");
        for (final ProvidedServiceAPI serviceAPI : serviceAPIS) {
            final String name = getSimpleName(serviceAPI.getClassName());
            final String bundleInfo = " (" + serviceAPI.getGroupId() + "-" + serviceAPI.getArtifactId() + "-" + serviceAPI.getVersion() + ")";
            xmlStreamWriter.writeStartElement("li");
            xmlStreamWriter.writeCharacters(name);
            xmlStreamWriter.writeStartElement("i");
            xmlStreamWriter.writeCharacters(bundleInfo);
            xmlStreamWriter.writeEndElement();
            xmlStreamWriter.writeEndElement();
        }
        xmlStreamWriter.writeEndElement();
    }
}

/**
 * Writes the See Also section as an unordered list of related component names.
 * Writes nothing when the extension declares no see-also entries.
 *
 * @param extension the extension whose see-also references are rendered
 * @param xmlStreamWriter the stream writer to use
 * @throws XMLStreamException thrown if there was a problem writing to the stream
 */
private void writeSeeAlso(final Extension extension, final XMLStreamWriter xmlStreamWriter) throws XMLStreamException {
    final List<String> seeAlsos = extension.getSeeAlso();
    if (seeAlsos != null && !seeAlsos.isEmpty()) {
        writeSimpleElement(xmlStreamWriter, "h3", "See Also:");
        xmlStreamWriter.writeStartElement("ul");
        for (final String seeAlso : seeAlsos) {
            writeSimpleElement(xmlStreamWriter, "li", seeAlso);
        }
        xmlStreamWriter.writeEndElement();
    }
}

/**
 * Writes a begin element, then text, then end element for the element of a
 * users choosing. Example: &lt;p&gt;text&lt;/p&gt;
 *
 * @param writer the stream writer to use
 * @param elementName the name of the element
 * @param characters the characters to insert into the element
 * @throws XMLStreamException thrown if there was a problem writing to the
 * stream
 */
protected final static void writeSimpleElement(final XMLStreamWriter writer, final String elementName, final String characters) throws XMLStreamException {
    writeSimpleElement(writer, elementName, characters, false);
}

/**
 * Writes a begin element, then text, then end element for the element of a
 * users choosing. Example: &lt;p&gt;text&lt;/p&gt;
 *
 * @param writer the stream writer to use
 * @param elementName the name of the element
 * @param characters the characters to insert into the element
 * @param strong whether the characters should be strong or not.
 * @throws XMLStreamException thrown if there was a problem writing to the
 * stream.
 */
protected final static void writeSimpleElement(final XMLStreamWriter writer, final String elementName, final String characters, boolean strong) throws XMLStreamException {
    writeSimpleElement(writer, elementName, characters, strong, null);
}

/**
 * Writes a begin element, an id attribute (if specified), then text, then
 * end element for element of the users choosing. Example: &lt;p
 * id="p-id"&gt;text&lt;/p&gt;
 *
 * @param writer the stream writer to use
 * @param elementName the name of the element
 * @param characters the text of the element
 * @param strong whether to bold the text of the element or not
 * @param id the id of the element; specifying null will cause no id
 * attribute to be written (the element itself is always written)
 * @throws XMLStreamException thrown if there was a problem writing to the
 * stream
 */
protected final static void writeSimpleElement(final XMLStreamWriter writer, final String elementName, final String characters, boolean strong, String id) throws XMLStreamException {
    writer.writeStartElement(elementName);
    if (id != null) {
        writer.writeAttribute("id", id);
    }
    if (strong) {
        writer.writeStartElement("strong");
    }
    writer.writeCharacters(characters);
    if (strong) {
        writer.writeEndElement();
    }
    writer.writeEndElement();
}

/**
 * A helper method to write a link
 *
 * @param xmlStreamWriter the stream to write to
 * @param text the text of the link
 * @param location the location of the link
 * @throws XMLStreamException thrown if there was a problem writing to the
 * stream
 */
protected void writeLink(final XMLStreamWriter xmlStreamWriter, final String text, final String location) throws XMLStreamException {
    xmlStreamWriter.writeStartElement("a");
    xmlStreamWriter.writeAttribute("href", location);
    xmlStreamWriter.writeCharacters(text);
    xmlStreamWriter.writeEndElement();
}
}
googleapis/google-cloud-java
37,172
java-channel/proto-google-cloud-channel-v1/src/main/java/com/google/cloud/channel/v1/ListEntitlementsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/channel/v1/service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.channel.v1; /** * * * <pre> * Response message for * [CloudChannelService.ListEntitlements][google.cloud.channel.v1.CloudChannelService.ListEntitlements]. * </pre> * * Protobuf type {@code google.cloud.channel.v1.ListEntitlementsResponse} */ public final class ListEntitlementsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.channel.v1.ListEntitlementsResponse) ListEntitlementsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListEntitlementsResponse.newBuilder() to construct. 
private ListEntitlementsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListEntitlementsResponse() { entitlements_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListEntitlementsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.channel.v1.ServiceProto .internal_static_google_cloud_channel_v1_ListEntitlementsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.channel.v1.ServiceProto .internal_static_google_cloud_channel_v1_ListEntitlementsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.channel.v1.ListEntitlementsResponse.class, com.google.cloud.channel.v1.ListEntitlementsResponse.Builder.class); } public static final int ENTITLEMENTS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.channel.v1.Entitlement> entitlements_; /** * * * <pre> * The reseller customer's entitlements. * </pre> * * <code>repeated .google.cloud.channel.v1.Entitlement entitlements = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.channel.v1.Entitlement> getEntitlementsList() { return entitlements_; } /** * * * <pre> * The reseller customer's entitlements. * </pre> * * <code>repeated .google.cloud.channel.v1.Entitlement entitlements = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.channel.v1.EntitlementOrBuilder> getEntitlementsOrBuilderList() { return entitlements_; } /** * * * <pre> * The reseller customer's entitlements. 
* </pre> * * <code>repeated .google.cloud.channel.v1.Entitlement entitlements = 1;</code> */ @java.lang.Override public int getEntitlementsCount() { return entitlements_.size(); } /** * * * <pre> * The reseller customer's entitlements. * </pre> * * <code>repeated .google.cloud.channel.v1.Entitlement entitlements = 1;</code> */ @java.lang.Override public com.google.cloud.channel.v1.Entitlement getEntitlements(int index) { return entitlements_.get(index); } /** * * * <pre> * The reseller customer's entitlements. * </pre> * * <code>repeated .google.cloud.channel.v1.Entitlement entitlements = 1;</code> */ @java.lang.Override public com.google.cloud.channel.v1.EntitlementOrBuilder getEntitlementsOrBuilder(int index) { return entitlements_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to list the next page of results. * Pass to * [ListEntitlementsRequest.page_token][google.cloud.channel.v1.ListEntitlementsRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token to list the next page of results. * Pass to * [ListEntitlementsRequest.page_token][google.cloud.channel.v1.ListEntitlementsRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < entitlements_.size(); i++) { output.writeMessage(1, entitlements_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < entitlements_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, entitlements_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.channel.v1.ListEntitlementsResponse)) { return super.equals(obj); } com.google.cloud.channel.v1.ListEntitlementsResponse other = (com.google.cloud.channel.v1.ListEntitlementsResponse) obj; if (!getEntitlementsList().equals(other.getEntitlementsList())) return false; if 
(!getNextPageToken().equals(other.getNextPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getEntitlementsCount() > 0) { hash = (37 * hash) + ENTITLEMENTS_FIELD_NUMBER; hash = (53 * hash) + getEntitlementsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.channel.v1.ListEntitlementsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.channel.v1.ListEntitlementsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.channel.v1.ListEntitlementsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.channel.v1.ListEntitlementsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.channel.v1.ListEntitlementsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.channel.v1.ListEntitlementsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.channel.v1.ListEntitlementsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.channel.v1.ListEntitlementsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.channel.v1.ListEntitlementsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.channel.v1.ListEntitlementsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.channel.v1.ListEntitlementsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.channel.v1.ListEntitlementsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.channel.v1.ListEntitlementsResponse prototype) { return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for * [CloudChannelService.ListEntitlements][google.cloud.channel.v1.CloudChannelService.ListEntitlements]. * </pre> * * Protobuf type {@code google.cloud.channel.v1.ListEntitlementsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.channel.v1.ListEntitlementsResponse) com.google.cloud.channel.v1.ListEntitlementsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.channel.v1.ServiceProto .internal_static_google_cloud_channel_v1_ListEntitlementsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.channel.v1.ServiceProto .internal_static_google_cloud_channel_v1_ListEntitlementsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.channel.v1.ListEntitlementsResponse.class, com.google.cloud.channel.v1.ListEntitlementsResponse.Builder.class); } // Construct using com.google.cloud.channel.v1.ListEntitlementsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (entitlementsBuilder_ == null) { entitlements_ = java.util.Collections.emptyList(); } else { entitlements_ = null; entitlementsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } 
@java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.channel.v1.ServiceProto .internal_static_google_cloud_channel_v1_ListEntitlementsResponse_descriptor; } @java.lang.Override public com.google.cloud.channel.v1.ListEntitlementsResponse getDefaultInstanceForType() { return com.google.cloud.channel.v1.ListEntitlementsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.channel.v1.ListEntitlementsResponse build() { com.google.cloud.channel.v1.ListEntitlementsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.channel.v1.ListEntitlementsResponse buildPartial() { com.google.cloud.channel.v1.ListEntitlementsResponse result = new com.google.cloud.channel.v1.ListEntitlementsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.channel.v1.ListEntitlementsResponse result) { if (entitlementsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { entitlements_ = java.util.Collections.unmodifiableList(entitlements_); bitField0_ = (bitField0_ & ~0x00000001); } result.entitlements_ = entitlements_; } else { result.entitlements_ = entitlementsBuilder_.build(); } } private void buildPartial0(com.google.cloud.channel.v1.ListEntitlementsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } 
@java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.channel.v1.ListEntitlementsResponse) { return mergeFrom((com.google.cloud.channel.v1.ListEntitlementsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.channel.v1.ListEntitlementsResponse other) { if (other == com.google.cloud.channel.v1.ListEntitlementsResponse.getDefaultInstance()) return this; if (entitlementsBuilder_ == null) { if (!other.entitlements_.isEmpty()) { if (entitlements_.isEmpty()) { entitlements_ = other.entitlements_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureEntitlementsIsMutable(); entitlements_.addAll(other.entitlements_); } onChanged(); } } else { if (!other.entitlements_.isEmpty()) { if (entitlementsBuilder_.isEmpty()) { entitlementsBuilder_.dispose(); entitlementsBuilder_ = null; entitlements_ = other.entitlements_; bitField0_ = (bitField0_ & ~0x00000001); entitlementsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getEntitlementsFieldBuilder() : null; } else { entitlementsBuilder_.addAllMessages(other.entitlements_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.channel.v1.Entitlement m = input.readMessage( com.google.cloud.channel.v1.Entitlement.parser(), extensionRegistry); if (entitlementsBuilder_ == null) { ensureEntitlementsIsMutable(); entitlements_.add(m); } else { entitlementsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.channel.v1.Entitlement> entitlements_ = java.util.Collections.emptyList(); private void ensureEntitlementsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { entitlements_ = new java.util.ArrayList<com.google.cloud.channel.v1.Entitlement>(entitlements_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.channel.v1.Entitlement, com.google.cloud.channel.v1.Entitlement.Builder, 
com.google.cloud.channel.v1.EntitlementOrBuilder> entitlementsBuilder_; /** * * * <pre> * The reseller customer's entitlements. * </pre> * * <code>repeated .google.cloud.channel.v1.Entitlement entitlements = 1;</code> */ public java.util.List<com.google.cloud.channel.v1.Entitlement> getEntitlementsList() { if (entitlementsBuilder_ == null) { return java.util.Collections.unmodifiableList(entitlements_); } else { return entitlementsBuilder_.getMessageList(); } } /** * * * <pre> * The reseller customer's entitlements. * </pre> * * <code>repeated .google.cloud.channel.v1.Entitlement entitlements = 1;</code> */ public int getEntitlementsCount() { if (entitlementsBuilder_ == null) { return entitlements_.size(); } else { return entitlementsBuilder_.getCount(); } } /** * * * <pre> * The reseller customer's entitlements. * </pre> * * <code>repeated .google.cloud.channel.v1.Entitlement entitlements = 1;</code> */ public com.google.cloud.channel.v1.Entitlement getEntitlements(int index) { if (entitlementsBuilder_ == null) { return entitlements_.get(index); } else { return entitlementsBuilder_.getMessage(index); } } /** * * * <pre> * The reseller customer's entitlements. * </pre> * * <code>repeated .google.cloud.channel.v1.Entitlement entitlements = 1;</code> */ public Builder setEntitlements(int index, com.google.cloud.channel.v1.Entitlement value) { if (entitlementsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureEntitlementsIsMutable(); entitlements_.set(index, value); onChanged(); } else { entitlementsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The reseller customer's entitlements. 
* </pre> * * <code>repeated .google.cloud.channel.v1.Entitlement entitlements = 1;</code> */ public Builder setEntitlements( int index, com.google.cloud.channel.v1.Entitlement.Builder builderForValue) { if (entitlementsBuilder_ == null) { ensureEntitlementsIsMutable(); entitlements_.set(index, builderForValue.build()); onChanged(); } else { entitlementsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The reseller customer's entitlements. * </pre> * * <code>repeated .google.cloud.channel.v1.Entitlement entitlements = 1;</code> */ public Builder addEntitlements(com.google.cloud.channel.v1.Entitlement value) { if (entitlementsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureEntitlementsIsMutable(); entitlements_.add(value); onChanged(); } else { entitlementsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The reseller customer's entitlements. * </pre> * * <code>repeated .google.cloud.channel.v1.Entitlement entitlements = 1;</code> */ public Builder addEntitlements(int index, com.google.cloud.channel.v1.Entitlement value) { if (entitlementsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureEntitlementsIsMutable(); entitlements_.add(index, value); onChanged(); } else { entitlementsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The reseller customer's entitlements. * </pre> * * <code>repeated .google.cloud.channel.v1.Entitlement entitlements = 1;</code> */ public Builder addEntitlements( com.google.cloud.channel.v1.Entitlement.Builder builderForValue) { if (entitlementsBuilder_ == null) { ensureEntitlementsIsMutable(); entitlements_.add(builderForValue.build()); onChanged(); } else { entitlementsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The reseller customer's entitlements. 
* </pre> * * <code>repeated .google.cloud.channel.v1.Entitlement entitlements = 1;</code> */ public Builder addEntitlements( int index, com.google.cloud.channel.v1.Entitlement.Builder builderForValue) { if (entitlementsBuilder_ == null) { ensureEntitlementsIsMutable(); entitlements_.add(index, builderForValue.build()); onChanged(); } else { entitlementsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The reseller customer's entitlements. * </pre> * * <code>repeated .google.cloud.channel.v1.Entitlement entitlements = 1;</code> */ public Builder addAllEntitlements( java.lang.Iterable<? extends com.google.cloud.channel.v1.Entitlement> values) { if (entitlementsBuilder_ == null) { ensureEntitlementsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, entitlements_); onChanged(); } else { entitlementsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The reseller customer's entitlements. * </pre> * * <code>repeated .google.cloud.channel.v1.Entitlement entitlements = 1;</code> */ public Builder clearEntitlements() { if (entitlementsBuilder_ == null) { entitlements_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { entitlementsBuilder_.clear(); } return this; } /** * * * <pre> * The reseller customer's entitlements. * </pre> * * <code>repeated .google.cloud.channel.v1.Entitlement entitlements = 1;</code> */ public Builder removeEntitlements(int index) { if (entitlementsBuilder_ == null) { ensureEntitlementsIsMutable(); entitlements_.remove(index); onChanged(); } else { entitlementsBuilder_.remove(index); } return this; } /** * * * <pre> * The reseller customer's entitlements. 
* </pre> * * <code>repeated .google.cloud.channel.v1.Entitlement entitlements = 1;</code> */ public com.google.cloud.channel.v1.Entitlement.Builder getEntitlementsBuilder(int index) { return getEntitlementsFieldBuilder().getBuilder(index); } /** * * * <pre> * The reseller customer's entitlements. * </pre> * * <code>repeated .google.cloud.channel.v1.Entitlement entitlements = 1;</code> */ public com.google.cloud.channel.v1.EntitlementOrBuilder getEntitlementsOrBuilder(int index) { if (entitlementsBuilder_ == null) { return entitlements_.get(index); } else { return entitlementsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The reseller customer's entitlements. * </pre> * * <code>repeated .google.cloud.channel.v1.Entitlement entitlements = 1;</code> */ public java.util.List<? extends com.google.cloud.channel.v1.EntitlementOrBuilder> getEntitlementsOrBuilderList() { if (entitlementsBuilder_ != null) { return entitlementsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(entitlements_); } } /** * * * <pre> * The reseller customer's entitlements. * </pre> * * <code>repeated .google.cloud.channel.v1.Entitlement entitlements = 1;</code> */ public com.google.cloud.channel.v1.Entitlement.Builder addEntitlementsBuilder() { return getEntitlementsFieldBuilder() .addBuilder(com.google.cloud.channel.v1.Entitlement.getDefaultInstance()); } /** * * * <pre> * The reseller customer's entitlements. * </pre> * * <code>repeated .google.cloud.channel.v1.Entitlement entitlements = 1;</code> */ public com.google.cloud.channel.v1.Entitlement.Builder addEntitlementsBuilder(int index) { return getEntitlementsFieldBuilder() .addBuilder(index, com.google.cloud.channel.v1.Entitlement.getDefaultInstance()); } /** * * * <pre> * The reseller customer's entitlements. 
* </pre> * * <code>repeated .google.cloud.channel.v1.Entitlement entitlements = 1;</code> */ public java.util.List<com.google.cloud.channel.v1.Entitlement.Builder> getEntitlementsBuilderList() { return getEntitlementsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.channel.v1.Entitlement, com.google.cloud.channel.v1.Entitlement.Builder, com.google.cloud.channel.v1.EntitlementOrBuilder> getEntitlementsFieldBuilder() { if (entitlementsBuilder_ == null) { entitlementsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.channel.v1.Entitlement, com.google.cloud.channel.v1.Entitlement.Builder, com.google.cloud.channel.v1.EntitlementOrBuilder>( entitlements_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); entitlements_ = null; } return entitlementsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to list the next page of results. * Pass to * [ListEntitlementsRequest.page_token][google.cloud.channel.v1.ListEntitlementsRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token to list the next page of results. * Pass to * [ListEntitlementsRequest.page_token][google.cloud.channel.v1.ListEntitlementsRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token to list the next page of results. * Pass to * [ListEntitlementsRequest.page_token][google.cloud.channel.v1.ListEntitlementsRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token to list the next page of results. * Pass to * [ListEntitlementsRequest.page_token][google.cloud.channel.v1.ListEntitlementsRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token to list the next page of results. * Pass to * [ListEntitlementsRequest.page_token][google.cloud.channel.v1.ListEntitlementsRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.channel.v1.ListEntitlementsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.channel.v1.ListEntitlementsResponse) private static final com.google.cloud.channel.v1.ListEntitlementsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.channel.v1.ListEntitlementsResponse(); } public static com.google.cloud.channel.v1.ListEntitlementsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListEntitlementsResponse> PARSER = new com.google.protobuf.AbstractParser<ListEntitlementsResponse>() { @java.lang.Override public ListEntitlementsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public 
static com.google.protobuf.Parser<ListEntitlementsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListEntitlementsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.channel.v1.ListEntitlementsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,162
java-dialogflow/proto-google-cloud-dialogflow-v2/src/main/java/com/google/cloud/dialogflow/v2/SearchAgentsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dialogflow/v2/agent.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.dialogflow.v2; /** * * * <pre> * The response message for * [Agents.SearchAgents][google.cloud.dialogflow.v2.Agents.SearchAgents]. * </pre> * * Protobuf type {@code google.cloud.dialogflow.v2.SearchAgentsResponse} */ public final class SearchAgentsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2.SearchAgentsResponse) SearchAgentsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use SearchAgentsResponse.newBuilder() to construct. 
private SearchAgentsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private SearchAgentsResponse() { agents_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new SearchAgentsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.v2.AgentProto .internal_static_google_cloud_dialogflow_v2_SearchAgentsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.v2.AgentProto .internal_static_google_cloud_dialogflow_v2_SearchAgentsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.v2.SearchAgentsResponse.class, com.google.cloud.dialogflow.v2.SearchAgentsResponse.Builder.class); } public static final int AGENTS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.dialogflow.v2.Agent> agents_; /** * * * <pre> * The list of agents. There will be a maximum number of items returned based * on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.v2.Agent agents = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.dialogflow.v2.Agent> getAgentsList() { return agents_; } /** * * * <pre> * The list of agents. There will be a maximum number of items returned based * on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.v2.Agent agents = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.dialogflow.v2.AgentOrBuilder> getAgentsOrBuilderList() { return agents_; } /** * * * <pre> * The list of agents. There will be a maximum number of items returned based * on the page_size field in the request. 
* </pre> * * <code>repeated .google.cloud.dialogflow.v2.Agent agents = 1;</code> */ @java.lang.Override public int getAgentsCount() { return agents_.size(); } /** * * * <pre> * The list of agents. There will be a maximum number of items returned based * on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.v2.Agent agents = 1;</code> */ @java.lang.Override public com.google.cloud.dialogflow.v2.Agent getAgents(int index) { return agents_.get(index); } /** * * * <pre> * The list of agents. There will be a maximum number of items returned based * on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.v2.Agent agents = 1;</code> */ @java.lang.Override public com.google.cloud.dialogflow.v2.AgentOrBuilder getAgentsOrBuilder(int index) { return agents_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < agents_.size(); i++) { output.writeMessage(1, agents_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < agents_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, agents_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dialogflow.v2.SearchAgentsResponse)) { return super.equals(obj); } com.google.cloud.dialogflow.v2.SearchAgentsResponse other = (com.google.cloud.dialogflow.v2.SearchAgentsResponse) obj; if (!getAgentsList().equals(other.getAgentsList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getAgentsCount() > 0) { hash = (37 * hash) + AGENTS_FIELD_NUMBER; hash = (53 * hash) + getAgentsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dialogflow.v2.SearchAgentsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2.SearchAgentsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2.SearchAgentsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2.SearchAgentsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2.SearchAgentsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2.SearchAgentsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.dialogflow.v2.SearchAgentsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2.SearchAgentsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.v2.SearchAgentsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2.SearchAgentsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.v2.SearchAgentsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2.SearchAgentsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.dialogflow.v2.SearchAgentsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == 
DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The response message for * [Agents.SearchAgents][google.cloud.dialogflow.v2.Agents.SearchAgents]. * </pre> * * Protobuf type {@code google.cloud.dialogflow.v2.SearchAgentsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2.SearchAgentsResponse) com.google.cloud.dialogflow.v2.SearchAgentsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.v2.AgentProto .internal_static_google_cloud_dialogflow_v2_SearchAgentsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.v2.AgentProto .internal_static_google_cloud_dialogflow_v2_SearchAgentsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.v2.SearchAgentsResponse.class, com.google.cloud.dialogflow.v2.SearchAgentsResponse.Builder.class); } // Construct using com.google.cloud.dialogflow.v2.SearchAgentsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (agentsBuilder_ == null) { agents_ = java.util.Collections.emptyList(); } else { agents_ = null; agentsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dialogflow.v2.AgentProto 
.internal_static_google_cloud_dialogflow_v2_SearchAgentsResponse_descriptor; } @java.lang.Override public com.google.cloud.dialogflow.v2.SearchAgentsResponse getDefaultInstanceForType() { return com.google.cloud.dialogflow.v2.SearchAgentsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.dialogflow.v2.SearchAgentsResponse build() { com.google.cloud.dialogflow.v2.SearchAgentsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dialogflow.v2.SearchAgentsResponse buildPartial() { com.google.cloud.dialogflow.v2.SearchAgentsResponse result = new com.google.cloud.dialogflow.v2.SearchAgentsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.dialogflow.v2.SearchAgentsResponse result) { if (agentsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { agents_ = java.util.Collections.unmodifiableList(agents_); bitField0_ = (bitField0_ & ~0x00000001); } result.agents_ = agents_; } else { result.agents_ = agentsBuilder_.build(); } } private void buildPartial0(com.google.cloud.dialogflow.v2.SearchAgentsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( 
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dialogflow.v2.SearchAgentsResponse) { return mergeFrom((com.google.cloud.dialogflow.v2.SearchAgentsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.dialogflow.v2.SearchAgentsResponse other) { if (other == com.google.cloud.dialogflow.v2.SearchAgentsResponse.getDefaultInstance()) return this; if (agentsBuilder_ == null) { if (!other.agents_.isEmpty()) { if (agents_.isEmpty()) { agents_ = other.agents_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureAgentsIsMutable(); agents_.addAll(other.agents_); } onChanged(); } } else { if (!other.agents_.isEmpty()) { if (agentsBuilder_.isEmpty()) { agentsBuilder_.dispose(); agentsBuilder_ = null; agents_ = other.agents_; bitField0_ = (bitField0_ & ~0x00000001); agentsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getAgentsFieldBuilder() : null; } else { agentsBuilder_.addAllMessages(other.agents_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.dialogflow.v2.Agent m = input.readMessage( com.google.cloud.dialogflow.v2.Agent.parser(), extensionRegistry); if (agentsBuilder_ == null) { ensureAgentsIsMutable(); agents_.add(m); } else { agentsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.dialogflow.v2.Agent> agents_ = java.util.Collections.emptyList(); private void ensureAgentsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { agents_ = new java.util.ArrayList<com.google.cloud.dialogflow.v2.Agent>(agents_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.dialogflow.v2.Agent, com.google.cloud.dialogflow.v2.Agent.Builder, com.google.cloud.dialogflow.v2.AgentOrBuilder> agentsBuilder_; /** * * * <pre> * The list of agents. 
There will be a maximum number of items returned based * on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.v2.Agent agents = 1;</code> */ public java.util.List<com.google.cloud.dialogflow.v2.Agent> getAgentsList() { if (agentsBuilder_ == null) { return java.util.Collections.unmodifiableList(agents_); } else { return agentsBuilder_.getMessageList(); } } /** * * * <pre> * The list of agents. There will be a maximum number of items returned based * on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.v2.Agent agents = 1;</code> */ public int getAgentsCount() { if (agentsBuilder_ == null) { return agents_.size(); } else { return agentsBuilder_.getCount(); } } /** * * * <pre> * The list of agents. There will be a maximum number of items returned based * on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.v2.Agent agents = 1;</code> */ public com.google.cloud.dialogflow.v2.Agent getAgents(int index) { if (agentsBuilder_ == null) { return agents_.get(index); } else { return agentsBuilder_.getMessage(index); } } /** * * * <pre> * The list of agents. There will be a maximum number of items returned based * on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.v2.Agent agents = 1;</code> */ public Builder setAgents(int index, com.google.cloud.dialogflow.v2.Agent value) { if (agentsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAgentsIsMutable(); agents_.set(index, value); onChanged(); } else { agentsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The list of agents. There will be a maximum number of items returned based * on the page_size field in the request. 
* </pre> * * <code>repeated .google.cloud.dialogflow.v2.Agent agents = 1;</code> */ public Builder setAgents( int index, com.google.cloud.dialogflow.v2.Agent.Builder builderForValue) { if (agentsBuilder_ == null) { ensureAgentsIsMutable(); agents_.set(index, builderForValue.build()); onChanged(); } else { agentsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of agents. There will be a maximum number of items returned based * on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.v2.Agent agents = 1;</code> */ public Builder addAgents(com.google.cloud.dialogflow.v2.Agent value) { if (agentsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAgentsIsMutable(); agents_.add(value); onChanged(); } else { agentsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The list of agents. There will be a maximum number of items returned based * on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.v2.Agent agents = 1;</code> */ public Builder addAgents(int index, com.google.cloud.dialogflow.v2.Agent value) { if (agentsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAgentsIsMutable(); agents_.add(index, value); onChanged(); } else { agentsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The list of agents. There will be a maximum number of items returned based * on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.v2.Agent agents = 1;</code> */ public Builder addAgents(com.google.cloud.dialogflow.v2.Agent.Builder builderForValue) { if (agentsBuilder_ == null) { ensureAgentsIsMutable(); agents_.add(builderForValue.build()); onChanged(); } else { agentsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The list of agents. 
There will be a maximum number of items returned based * on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.v2.Agent agents = 1;</code> */ public Builder addAgents( int index, com.google.cloud.dialogflow.v2.Agent.Builder builderForValue) { if (agentsBuilder_ == null) { ensureAgentsIsMutable(); agents_.add(index, builderForValue.build()); onChanged(); } else { agentsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of agents. There will be a maximum number of items returned based * on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.v2.Agent agents = 1;</code> */ public Builder addAllAgents( java.lang.Iterable<? extends com.google.cloud.dialogflow.v2.Agent> values) { if (agentsBuilder_ == null) { ensureAgentsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, agents_); onChanged(); } else { agentsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The list of agents. There will be a maximum number of items returned based * on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.v2.Agent agents = 1;</code> */ public Builder clearAgents() { if (agentsBuilder_ == null) { agents_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { agentsBuilder_.clear(); } return this; } /** * * * <pre> * The list of agents. There will be a maximum number of items returned based * on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.v2.Agent agents = 1;</code> */ public Builder removeAgents(int index) { if (agentsBuilder_ == null) { ensureAgentsIsMutable(); agents_.remove(index); onChanged(); } else { agentsBuilder_.remove(index); } return this; } /** * * * <pre> * The list of agents. There will be a maximum number of items returned based * on the page_size field in the request. 
* </pre> * * <code>repeated .google.cloud.dialogflow.v2.Agent agents = 1;</code> */ public com.google.cloud.dialogflow.v2.Agent.Builder getAgentsBuilder(int index) { return getAgentsFieldBuilder().getBuilder(index); } /** * * * <pre> * The list of agents. There will be a maximum number of items returned based * on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.v2.Agent agents = 1;</code> */ public com.google.cloud.dialogflow.v2.AgentOrBuilder getAgentsOrBuilder(int index) { if (agentsBuilder_ == null) { return agents_.get(index); } else { return agentsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The list of agents. There will be a maximum number of items returned based * on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.v2.Agent agents = 1;</code> */ public java.util.List<? extends com.google.cloud.dialogflow.v2.AgentOrBuilder> getAgentsOrBuilderList() { if (agentsBuilder_ != null) { return agentsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(agents_); } } /** * * * <pre> * The list of agents. There will be a maximum number of items returned based * on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.v2.Agent agents = 1;</code> */ public com.google.cloud.dialogflow.v2.Agent.Builder addAgentsBuilder() { return getAgentsFieldBuilder() .addBuilder(com.google.cloud.dialogflow.v2.Agent.getDefaultInstance()); } /** * * * <pre> * The list of agents. There will be a maximum number of items returned based * on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.v2.Agent agents = 1;</code> */ public com.google.cloud.dialogflow.v2.Agent.Builder addAgentsBuilder(int index) { return getAgentsFieldBuilder() .addBuilder(index, com.google.cloud.dialogflow.v2.Agent.getDefaultInstance()); } /** * * * <pre> * The list of agents. 
There will be a maximum number of items returned based * on the page_size field in the request. * </pre> * * <code>repeated .google.cloud.dialogflow.v2.Agent agents = 1;</code> */ public java.util.List<com.google.cloud.dialogflow.v2.Agent.Builder> getAgentsBuilderList() { return getAgentsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.dialogflow.v2.Agent, com.google.cloud.dialogflow.v2.Agent.Builder, com.google.cloud.dialogflow.v2.AgentOrBuilder> getAgentsFieldBuilder() { if (agentsBuilder_ == null) { agentsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.dialogflow.v2.Agent, com.google.cloud.dialogflow.v2.Agent.Builder, com.google.cloud.dialogflow.v2.AgentOrBuilder>( agents_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); agents_ = null; } return agentsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2.SearchAgentsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2.SearchAgentsResponse) private static final com.google.cloud.dialogflow.v2.SearchAgentsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2.SearchAgentsResponse(); } public static com.google.cloud.dialogflow.v2.SearchAgentsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<SearchAgentsResponse> PARSER = new com.google.protobuf.AbstractParser<SearchAgentsResponse>() { @java.lang.Override public SearchAgentsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static 
com.google.protobuf.Parser<SearchAgentsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<SearchAgentsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dialogflow.v2.SearchAgentsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,253
java-iap/proto-google-cloud-iap-v1/src/main/java/com/google/cloud/iap/v1/UpdateIapSettingsRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/iap/v1/service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.iap.v1; /** * * * <pre> * The request sent to UpdateIapSettings. * </pre> * * Protobuf type {@code google.cloud.iap.v1.UpdateIapSettingsRequest} */ public final class UpdateIapSettingsRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.iap.v1.UpdateIapSettingsRequest) UpdateIapSettingsRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateIapSettingsRequest.newBuilder() to construct. 
private UpdateIapSettingsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateIapSettingsRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateIapSettingsRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.iap.v1.Service .internal_static_google_cloud_iap_v1_UpdateIapSettingsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.iap.v1.Service .internal_static_google_cloud_iap_v1_UpdateIapSettingsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.iap.v1.UpdateIapSettingsRequest.class, com.google.cloud.iap.v1.UpdateIapSettingsRequest.Builder.class); } private int bitField0_; public static final int IAP_SETTINGS_FIELD_NUMBER = 1; private com.google.cloud.iap.v1.IapSettings iapSettings_; /** * * * <pre> * Required. The new values for the IAP settings to be updated. * Authorization: Requires the `updateSettings` permission for the associated * resource. * </pre> * * <code> * .google.cloud.iap.v1.IapSettings iap_settings = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the iapSettings field is set. */ @java.lang.Override public boolean hasIapSettings() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The new values for the IAP settings to be updated. * Authorization: Requires the `updateSettings` permission for the associated * resource. * </pre> * * <code> * .google.cloud.iap.v1.IapSettings iap_settings = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The iapSettings. */ @java.lang.Override public com.google.cloud.iap.v1.IapSettings getIapSettings() { return iapSettings_ == null ? 
com.google.cloud.iap.v1.IapSettings.getDefaultInstance() : iapSettings_; } /** * * * <pre> * Required. The new values for the IAP settings to be updated. * Authorization: Requires the `updateSettings` permission for the associated * resource. * </pre> * * <code> * .google.cloud.iap.v1.IapSettings iap_settings = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.iap.v1.IapSettingsOrBuilder getIapSettingsOrBuilder() { return iapSettings_ == null ? com.google.cloud.iap.v1.IapSettings.getDefaultInstance() : iapSettings_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * The field mask specifying which IAP settings should be updated. * If omitted, then all of the settings are updated. See * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. * * Note: All IAP reauth settings must always be set together, using the * field mask: `iapSettings.accessSettings.reauthSettings`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The field mask specifying which IAP settings should be updated. * If omitted, then all of the settings are updated. See * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. * * Note: All IAP reauth settings must always be set together, using the * field mask: `iapSettings.accessSettings.reauthSettings`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * The field mask specifying which IAP settings should be updated. 
* If omitted, then all of the settings are updated. See * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. * * Note: All IAP reauth settings must always be set together, using the * field mask: `iapSettings.accessSettings.reauthSettings`. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getIapSettings()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getIapSettings()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.iap.v1.UpdateIapSettingsRequest)) { return super.equals(obj); } com.google.cloud.iap.v1.UpdateIapSettingsRequest other = (com.google.cloud.iap.v1.UpdateIapSettingsRequest) obj; if (hasIapSettings() != other.hasIapSettings()) return false; if (hasIapSettings()) { if 
(!getIapSettings().equals(other.getIapSettings())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasIapSettings()) { hash = (37 * hash) + IAP_SETTINGS_FIELD_NUMBER; hash = (53 * hash) + getIapSettings().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.iap.v1.UpdateIapSettingsRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.iap.v1.UpdateIapSettingsRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.iap.v1.UpdateIapSettingsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.iap.v1.UpdateIapSettingsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.iap.v1.UpdateIapSettingsRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.cloud.iap.v1.UpdateIapSettingsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.iap.v1.UpdateIapSettingsRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.iap.v1.UpdateIapSettingsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.iap.v1.UpdateIapSettingsRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.iap.v1.UpdateIapSettingsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.iap.v1.UpdateIapSettingsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.iap.v1.UpdateIapSettingsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return 
DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.iap.v1.UpdateIapSettingsRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The request sent to UpdateIapSettings. * </pre> * * Protobuf type {@code google.cloud.iap.v1.UpdateIapSettingsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.iap.v1.UpdateIapSettingsRequest) com.google.cloud.iap.v1.UpdateIapSettingsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.iap.v1.Service .internal_static_google_cloud_iap_v1_UpdateIapSettingsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.iap.v1.Service .internal_static_google_cloud_iap_v1_UpdateIapSettingsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.iap.v1.UpdateIapSettingsRequest.class, com.google.cloud.iap.v1.UpdateIapSettingsRequest.Builder.class); } // Construct using com.google.cloud.iap.v1.UpdateIapSettingsRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getIapSettingsFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { 
super.clear(); bitField0_ = 0; iapSettings_ = null; if (iapSettingsBuilder_ != null) { iapSettingsBuilder_.dispose(); iapSettingsBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.iap.v1.Service .internal_static_google_cloud_iap_v1_UpdateIapSettingsRequest_descriptor; } @java.lang.Override public com.google.cloud.iap.v1.UpdateIapSettingsRequest getDefaultInstanceForType() { return com.google.cloud.iap.v1.UpdateIapSettingsRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.iap.v1.UpdateIapSettingsRequest build() { com.google.cloud.iap.v1.UpdateIapSettingsRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.iap.v1.UpdateIapSettingsRequest buildPartial() { com.google.cloud.iap.v1.UpdateIapSettingsRequest result = new com.google.cloud.iap.v1.UpdateIapSettingsRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.iap.v1.UpdateIapSettingsRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.iapSettings_ = iapSettingsBuilder_ == null ? iapSettings_ : iapSettingsBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.iap.v1.UpdateIapSettingsRequest) { return mergeFrom((com.google.cloud.iap.v1.UpdateIapSettingsRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.iap.v1.UpdateIapSettingsRequest other) { if (other == com.google.cloud.iap.v1.UpdateIapSettingsRequest.getDefaultInstance()) return this; if (other.hasIapSettings()) { mergeIapSettings(other.getIapSettings()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new 
java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getIapSettingsFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.iap.v1.IapSettings iapSettings_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.iap.v1.IapSettings, com.google.cloud.iap.v1.IapSettings.Builder, com.google.cloud.iap.v1.IapSettingsOrBuilder> iapSettingsBuilder_; /** * * * <pre> * Required. The new values for the IAP settings to be updated. * Authorization: Requires the `updateSettings` permission for the associated * resource. * </pre> * * <code> * .google.cloud.iap.v1.IapSettings iap_settings = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the iapSettings field is set. */ public boolean hasIapSettings() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The new values for the IAP settings to be updated. * Authorization: Requires the `updateSettings` permission for the associated * resource. * </pre> * * <code> * .google.cloud.iap.v1.IapSettings iap_settings = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The iapSettings. */ public com.google.cloud.iap.v1.IapSettings getIapSettings() { if (iapSettingsBuilder_ == null) { return iapSettings_ == null ? 
com.google.cloud.iap.v1.IapSettings.getDefaultInstance() : iapSettings_;
      } else {
        return iapSettingsBuilder_.getMessage();
      }
    }

    /**
     * Sets the required iap_settings field (field 1). Rejects null.
     */
    public Builder setIapSettings(com.google.cloud.iap.v1.IapSettings value) {
      if (iapSettingsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        iapSettings_ = value;
      } else {
        iapSettingsBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     * Sets iap_settings (field 1) from a nested builder.
     */
    public Builder setIapSettings(com.google.cloud.iap.v1.IapSettings.Builder builderForValue) {
      if (iapSettingsBuilder_ == null) {
        iapSettings_ = builderForValue.build();
      } else {
        iapSettingsBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     * Merges {@code value} into iap_settings: when a non-default value is
     * already present the two messages are field-merged, otherwise {@code value}
     * simply replaces the stored message.
     */
    public Builder mergeIapSettings(com.google.cloud.iap.v1.IapSettings value) {
      if (iapSettingsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)
            && iapSettings_ != null
            && iapSettings_ != com.google.cloud.iap.v1.IapSettings.getDefaultInstance()) {
          getIapSettingsBuilder().mergeFrom(value);
        } else {
          iapSettings_ = value;
        }
      } else {
        iapSettingsBuilder_.mergeFrom(value);
      }
      if (iapSettings_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }

    /**
     * Clears iap_settings, its presence bit, and any live field builder.
     */
    public Builder clearIapSettings() {
      bitField0_ = (bitField0_ & ~0x00000001);
      iapSettings_ = null;
      if (iapSettingsBuilder_ != null) {
        iapSettingsBuilder_.dispose();
        iapSettingsBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     * Returns a mutable builder for iap_settings, marking the field as set.
     */
    public com.google.cloud.iap.v1.IapSettings.Builder getIapSettingsBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getIapSettingsFieldBuilder().getBuilder();
    }

    /**
     * Read-only view of iap_settings that does not force builder creation.
     */
    public com.google.cloud.iap.v1.IapSettingsOrBuilder getIapSettingsOrBuilder() {
      if (iapSettingsBuilder_ != null) {
        return iapSettingsBuilder_.getMessageOrBuilder();
      } else {
        return iapSettings_ == null
            ? com.google.cloud.iap.v1.IapSettings.getDefaultInstance()
            : iapSettings_;
      }
    }

    // Lazily creates the nested-message builder for iap_settings; once created,
    // iapSettings_ is nulled out and the builder becomes the source of truth.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.iap.v1.IapSettings,
            com.google.cloud.iap.v1.IapSettings.Builder,
            com.google.cloud.iap.v1.IapSettingsOrBuilder>
        getIapSettingsFieldBuilder() {
      if (iapSettingsBuilder_ == null) {
        iapSettingsBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.iap.v1.IapSettings,
                com.google.cloud.iap.v1.IapSettings.Builder,
                com.google.cloud.iap.v1.IapSettingsOrBuilder>(
                getIapSettings(), getParentForChildren(), isClean());
        iapSettings_ = null;
      }
      return iapSettingsBuilder_;
    }

    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;

    /**
     * The field mask specifying which IAP settings should be updated.
     * If omitted, then all of the settings are updated. See
     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask.
     *
     * Note: All IAP reauth settings must always be set together, using the
     * field mask: `iapSettings.accessSettings.reauthSettings`.
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     *
     * @return Whether the updateMask field is set.
*/
    public boolean hasUpdateMask() {
      return ((bitField0_ & 0x00000002) != 0);
    }

    /**
     * The field mask specifying which IAP settings should be updated.
     * If omitted, all of the settings are updated.
     *
     * @return The updateMask, or the default instance if unset.
     */
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }

    /**
     * Sets update_mask (field 2). Rejects null.
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     * Sets update_mask (field 2) from a nested builder.
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     * Merges {@code value} into update_mask: when a non-default mask is already
     * present the two masks are field-merged, otherwise {@code value} replaces
     * the stored mask.
     */
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }

    /**
     * Clears update_mask, its presence bit, and any live field builder.
     */
    public Builder clearUpdateMask() {
      bitField0_ = (bitField0_ & ~0x00000002);
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     * Returns a mutable builder for update_mask, marking the field as set.
     */
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }

    /**
     * Read-only view of update_mask that does not force builder creation.
     */
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }

    // Lazily creates the nested-message builder for update_mask; once created,
    // updateMask_ is nulled out and the builder becomes the source of truth.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.iap.v1.UpdateIapSettingsRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.iap.v1.UpdateIapSettingsRequest)
  private static final com.google.cloud.iap.v1.UpdateIapSettingsRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.iap.v1.UpdateIapSettingsRequest();
  }

  public static com.google.cloud.iap.v1.UpdateIapSettingsRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Shared parser: delegates to the builder's mergeFrom and preserves the
  // partially-parsed message on failure via setUnfinishedMessage.
  private static final com.google.protobuf.Parser<UpdateIapSettingsRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateIapSettingsRequest>() {
        @java.lang.Override
        public UpdateIapSettingsRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<UpdateIapSettingsRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<UpdateIapSettingsRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.iap.v1.UpdateIapSettingsRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
googleapis/google-cloud-java
37,141
java-analyticshub/proto-google-cloud-analyticshub-v1/src/main/java/com/google/cloud/bigquery/analyticshub/v1/CreateListingRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/bigquery/analyticshub/v1/analyticshub.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.bigquery.analyticshub.v1; /** * * * <pre> * Message for creating a listing. * </pre> * * Protobuf type {@code google.cloud.bigquery.analyticshub.v1.CreateListingRequest} */ public final class CreateListingRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.bigquery.analyticshub.v1.CreateListingRequest) CreateListingRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateListingRequest.newBuilder() to construct. 
private CreateListingRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// Default-instance constructor: both string fields start empty; listing_ stays null.
private CreateListingRequest() {
  parent_ = "";
  listingId_ = "";
}

@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new CreateListingRequest();
}

public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.bigquery.analyticshub.v1.AnalyticsHubProto
      .internal_static_google_cloud_bigquery_analyticshub_v1_CreateListingRequest_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.bigquery.analyticshub.v1.AnalyticsHubProto
      .internal_static_google_cloud_bigquery_analyticshub_v1_CreateListingRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest.class,
          com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest.Builder.class);
}

// Presence bit 0x1 tracks the `listing` message field (strings use emptiness).
private int bitField0_;

public static final int PARENT_FIELD_NUMBER = 1;

@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";

/**
 * Required. The parent resource path of the listing,
 * e.g. `projects/myproject/locations/us/dataExchanges/123`.
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The parent.
 */
@java.lang.Override
public java.lang.String getParent() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String so later calls skip the UTF-8 decode.
    parent_ = s;
    return s;
  }
}

/**
 * Required. The parent resource path of the listing.
 *
 * @return The bytes for parent.
 */
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    parent_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

public static final int LISTING_ID_FIELD_NUMBER = 2;

@SuppressWarnings("serial")
private volatile java.lang.Object listingId_ = "";

/**
 * Required. The ID of the listing to create.
 * Must contain only Unicode letters, numbers (0-9), underscores (_).
 * Max length: 100 bytes.
 *
 * <code>string listing_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The listingId.
 */
@java.lang.Override
public java.lang.String getListingId() {
  java.lang.Object ref = listingId_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    listingId_ = s;
    return s;
  }
}

/**
 * Required. The ID of the listing to create.
 *
 * @return The bytes for listingId.
 */
@java.lang.Override
public com.google.protobuf.ByteString getListingIdBytes() {
  java.lang.Object ref = listingId_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    listingId_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

public static final int LISTING_FIELD_NUMBER = 3;
private com.google.cloud.bigquery.analyticshub.v1.Listing listing_;

/**
 * Required. The listing to create.
 *
 * <code>
 * .google.cloud.bigquery.analyticshub.v1.Listing listing = 3 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return Whether the listing field is set.
 */
@java.lang.Override
public boolean hasListing() {
  return ((bitField0_ & 0x00000001) != 0);
}

/**
 * Required. The listing to create.
 *
 * @return The listing, or the default instance if unset.
 */
@java.lang.Override
public com.google.cloud.bigquery.analyticshub.v1.Listing getListing() {
  return listing_ == null
      ? com.google.cloud.bigquery.analyticshub.v1.Listing.getDefaultInstance()
      : listing_;
}

/**
 * Required. The listing to create (read-only view).
 */
@java.lang.Override
public com.google.cloud.bigquery.analyticshub.v1.ListingOrBuilder getListingOrBuilder() {
  return listing_ == null ?
com.google.cloud.bigquery.analyticshub.v1.Listing.getDefaultInstance() : listing_;
}

// Memoized tri-state: -1 = unknown, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}

// Serializes non-default fields in field-number order (1, 2, 3), then the
// preserved unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(listingId_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, listingId_);
  }
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeMessage(3, getListing());
  }
  getUnknownFields().writeTo(output);
}

// Computes and memoizes the serialized size; mirrors writeTo field-for-field.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(listingId_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, listingId_);
  }
  if (((bitField0_ & 0x00000001) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getListing());
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}

// Value equality over parent, listing_id, listing (presence-aware), and
// unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest other =
      (com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest) obj;
  if (!getParent().equals(other.getParent())) return false;
  if (!getListingId().equals(other.getListingId())) return false;
  if (hasListing() != other.hasListing()) return false;
  if (hasListing()) {
    if (!getListing().equals(other.getListing())) return false;
  }
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

// Hash consistent with equals; memoized (0 is treated as "not yet computed").
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + PARENT_FIELD_NUMBER;
  hash = (53 * hash) + getParent().hashCode();
  hash = (37 * hash) + LISTING_ID_FIELD_NUMBER;
  hash = (53 * hash) + getListingId().hashCode();
  if (hasListing()) {
    hash = (37 * hash) + LISTING_FIELD_NUMBER;
    hash = (53 * hash) + getListing().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}

// Static parse entry points: one pair (with/without an extension registry)
// per supported input representation.
public static com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest parseFrom(
    byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(
    com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}

/**
 * Message for creating a listing.
 *
 * Protobuf type {@code google.cloud.bigquery.analyticshub.v1.CreateListingRequest}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.analyticshub.v1.CreateListingRequest)
    com.google.cloud.bigquery.analyticshub.v1.CreateListingRequestOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.bigquery.analyticshub.v1.AnalyticsHubProto
        .internal_static_google_cloud_bigquery_analyticshub_v1_CreateListingRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.bigquery.analyticshub.v1.AnalyticsHubProto
        .internal_static_google_cloud_bigquery_analyticshub_v1_CreateListingRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest.class,
            com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest.Builder.class);
  }

  // Construct using com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
maybeForceBuilderInitialization();
  }

  // Eagerly creates the nested `listing` field builder when the runtime forces
  // field builders on.
  private void maybeForceBuilderInitialization() {
    if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
      getListingFieldBuilder();
    }
  }

  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    parent_ = "";
    listingId_ = "";
    listing_ = null;
    if (listingBuilder_ != null) {
      listingBuilder_.dispose();
      listingBuilder_ = null;
    }
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.bigquery.analyticshub.v1.AnalyticsHubProto
        .internal_static_google_cloud_bigquery_analyticshub_v1_CreateListingRequest_descriptor;
  }

  @java.lang.Override
  public com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest
      getDefaultInstanceForType() {
    return com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest.getDefaultInstance();
  }

  @java.lang.Override
  public com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest build() {
    com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest buildPartial() {
    com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest result =
        new com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest(this);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }

  // Copies fields whose builder presence bits (0x1 parent, 0x2 listing_id,
  // 0x4 listing) are set into `result`.
  private void buildPartial0(
      com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest result) {
    int from_bitField0_ = bitField0_;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      result.parent_ = parent_;
    }
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.listingId_ = listingId_;
    }
    int to_bitField0_ = 0;
    if (((from_bitField0_ & 0x00000004) != 0)) {
      result.listing_ = listingBuilder_ == null ? listing_ : listingBuilder_.build();
      to_bitField0_ |= 0x00000001;
    }
    result.bitField0_ |= to_bitField0_;
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  // Generic merge entry point: dispatches to the typed overload when possible.
  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest) {
      return mergeFrom((com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  // Field-wise merge: non-empty strings overwrite; the listing message is
  // message-merged; merging the default instance is a no-op.
  public Builder mergeFrom(com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest other) {
    if (other
        == com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest.getDefaultInstance())
      return this;
    if (!other.getParent().isEmpty()) {
      parent_ = other.parent_;
      bitField0_ |= 0x00000001;
      onChanged();
    }
    if (!other.getListingId().isEmpty()) {
      listingId_ = other.listingId_;
      bitField0_ |= 0x00000002;
      onChanged();
    }
    if (other.hasListing()) {
      mergeListing(other.getListing());
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  // Wire-format parse loop: reads tags from `input` and dispatches on each;
  // unknown fields are preserved via parseUnknownField.
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            { // field 1 (parent), wire type 2 (length-delimited)
              parent_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000001;
              break;
            } // case 10
          case 18:
            { // field 2 (listing_id), wire type 2 (length-delimited)
              listingId_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000002;
              break;
            } // case 18
          case 26:
            { // field 3 (listing), wire type 2 (length-delimited)
              input.readMessage(getListingFieldBuilder().getBuilder(), extensionRegistry);
              bitField0_ |= 0x00000004;
              break;
            } // case 26
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }

  // Presence bits for this builder: 0x1 parent, 0x2 listing_id, 0x4 listing.
  private int bitField0_;

  private java.lang.Object parent_ = "";

  /**
   * Required. The parent resource path of the listing,
   * e.g. `projects/myproject/locations/us/dataExchanges/123`.
   *
   * @return The parent.
   */
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so later calls skip the UTF-8 decode.
      parent_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }

  /**
   * Required. The parent resource path of the listing.
   *
   * @return The bytes for parent.
*/ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The parent resource path of the listing. * e.g. `projects/myproject/locations/us/dataExchanges/123`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The parent resource path of the listing. * e.g. `projects/myproject/locations/us/dataExchanges/123`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The parent resource path of the listing. * e.g. `projects/myproject/locations/us/dataExchanges/123`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object listingId_ = ""; /** * * * <pre> * Required. The ID of the listing to create. 
* Must contain only Unicode letters, numbers (0-9), underscores (_). * Max length: 100 bytes. * </pre> * * <code>string listing_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The listingId. */ public java.lang.String getListingId() { java.lang.Object ref = listingId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); listingId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The ID of the listing to create. * Must contain only Unicode letters, numbers (0-9), underscores (_). * Max length: 100 bytes. * </pre> * * <code>string listing_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for listingId. */ public com.google.protobuf.ByteString getListingIdBytes() { java.lang.Object ref = listingId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); listingId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The ID of the listing to create. * Must contain only Unicode letters, numbers (0-9), underscores (_). * Max length: 100 bytes. * </pre> * * <code>string listing_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The listingId to set. * @return This builder for chaining. */ public Builder setListingId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } listingId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The ID of the listing to create. * Must contain only Unicode letters, numbers (0-9), underscores (_). * Max length: 100 bytes. * </pre> * * <code>string listing_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. 
*/ public Builder clearListingId() { listingId_ = getDefaultInstance().getListingId(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Required. The ID of the listing to create. * Must contain only Unicode letters, numbers (0-9), underscores (_). * Max length: 100 bytes. * </pre> * * <code>string listing_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for listingId to set. * @return This builder for chaining. */ public Builder setListingIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); listingId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private com.google.cloud.bigquery.analyticshub.v1.Listing listing_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.bigquery.analyticshub.v1.Listing, com.google.cloud.bigquery.analyticshub.v1.Listing.Builder, com.google.cloud.bigquery.analyticshub.v1.ListingOrBuilder> listingBuilder_; /** * * * <pre> * Required. The listing to create. * </pre> * * <code> * .google.cloud.bigquery.analyticshub.v1.Listing listing = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the listing field is set. */ public boolean hasListing() { return ((bitField0_ & 0x00000004) != 0); } /** * * * <pre> * Required. The listing to create. * </pre> * * <code> * .google.cloud.bigquery.analyticshub.v1.Listing listing = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The listing. */ public com.google.cloud.bigquery.analyticshub.v1.Listing getListing() { if (listingBuilder_ == null) { return listing_ == null ? com.google.cloud.bigquery.analyticshub.v1.Listing.getDefaultInstance() : listing_; } else { return listingBuilder_.getMessage(); } } /** * * * <pre> * Required. The listing to create. 
* </pre> * * <code> * .google.cloud.bigquery.analyticshub.v1.Listing listing = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setListing(com.google.cloud.bigquery.analyticshub.v1.Listing value) { if (listingBuilder_ == null) { if (value == null) { throw new NullPointerException(); } listing_ = value; } else { listingBuilder_.setMessage(value); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. The listing to create. * </pre> * * <code> * .google.cloud.bigquery.analyticshub.v1.Listing listing = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setListing( com.google.cloud.bigquery.analyticshub.v1.Listing.Builder builderForValue) { if (listingBuilder_ == null) { listing_ = builderForValue.build(); } else { listingBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. The listing to create. * </pre> * * <code> * .google.cloud.bigquery.analyticshub.v1.Listing listing = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeListing(com.google.cloud.bigquery.analyticshub.v1.Listing value) { if (listingBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0) && listing_ != null && listing_ != com.google.cloud.bigquery.analyticshub.v1.Listing.getDefaultInstance()) { getListingBuilder().mergeFrom(value); } else { listing_ = value; } } else { listingBuilder_.mergeFrom(value); } if (listing_ != null) { bitField0_ |= 0x00000004; onChanged(); } return this; } /** * * * <pre> * Required. The listing to create. * </pre> * * <code> * .google.cloud.bigquery.analyticshub.v1.Listing listing = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearListing() { bitField0_ = (bitField0_ & ~0x00000004); listing_ = null; if (listingBuilder_ != null) { listingBuilder_.dispose(); listingBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The listing to create. 
* </pre> * * <code> * .google.cloud.bigquery.analyticshub.v1.Listing listing = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.bigquery.analyticshub.v1.Listing.Builder getListingBuilder() { bitField0_ |= 0x00000004; onChanged(); return getListingFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The listing to create. * </pre> * * <code> * .google.cloud.bigquery.analyticshub.v1.Listing listing = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.bigquery.analyticshub.v1.ListingOrBuilder getListingOrBuilder() { if (listingBuilder_ != null) { return listingBuilder_.getMessageOrBuilder(); } else { return listing_ == null ? com.google.cloud.bigquery.analyticshub.v1.Listing.getDefaultInstance() : listing_; } } /** * * * <pre> * Required. The listing to create. * </pre> * * <code> * .google.cloud.bigquery.analyticshub.v1.Listing listing = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.bigquery.analyticshub.v1.Listing, com.google.cloud.bigquery.analyticshub.v1.Listing.Builder, com.google.cloud.bigquery.analyticshub.v1.ListingOrBuilder> getListingFieldBuilder() { if (listingBuilder_ == null) { listingBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.bigquery.analyticshub.v1.Listing, com.google.cloud.bigquery.analyticshub.v1.Listing.Builder, com.google.cloud.bigquery.analyticshub.v1.ListingOrBuilder>( getListing(), getParentForChildren(), isClean()); listing_ = null; } return listingBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // 
@@protoc_insertion_point(builder_scope:google.cloud.bigquery.analyticshub.v1.CreateListingRequest) } // @@protoc_insertion_point(class_scope:google.cloud.bigquery.analyticshub.v1.CreateListingRequest) private static final com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest(); } public static com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CreateListingRequest> PARSER = new com.google.protobuf.AbstractParser<CreateListingRequest>() { @java.lang.Override public CreateListingRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CreateListingRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateListingRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.bigquery.analyticshub.v1.CreateListingRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,233
java-analytics-admin/proto-google-analytics-admin-v1beta/src/main/java/com/google/analytics/admin/v1beta/UpdateMeasurementProtocolSecretRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/analytics/admin/v1beta/analytics_admin.proto // Protobuf Java Version: 3.25.8 package com.google.analytics.admin.v1beta; /** * * * <pre> * Request message for UpdateMeasurementProtocolSecret RPC * </pre> * * Protobuf type {@code google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest} */ public final class UpdateMeasurementProtocolSecretRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest) UpdateMeasurementProtocolSecretRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateMeasurementProtocolSecretRequest.newBuilder() to construct. 
private UpdateMeasurementProtocolSecretRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateMeasurementProtocolSecretRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateMeasurementProtocolSecretRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.analytics.admin.v1beta.AnalyticsAdminProto .internal_static_google_analytics_admin_v1beta_UpdateMeasurementProtocolSecretRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.analytics.admin.v1beta.AnalyticsAdminProto .internal_static_google_analytics_admin_v1beta_UpdateMeasurementProtocolSecretRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest.class, com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest.Builder.class); } private int bitField0_; public static final int MEASUREMENT_PROTOCOL_SECRET_FIELD_NUMBER = 1; private com.google.analytics.admin.v1beta.MeasurementProtocolSecret measurementProtocolSecret_; /** * * * <pre> * Required. The measurement protocol secret to update. * </pre> * * <code> * .google.analytics.admin.v1beta.MeasurementProtocolSecret measurement_protocol_secret = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the measurementProtocolSecret field is set. */ @java.lang.Override public boolean hasMeasurementProtocolSecret() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The measurement protocol secret to update. * </pre> * * <code> * .google.analytics.admin.v1beta.MeasurementProtocolSecret measurement_protocol_secret = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The measurementProtocolSecret. 
*/ @java.lang.Override public com.google.analytics.admin.v1beta.MeasurementProtocolSecret getMeasurementProtocolSecret() { return measurementProtocolSecret_ == null ? com.google.analytics.admin.v1beta.MeasurementProtocolSecret.getDefaultInstance() : measurementProtocolSecret_; } /** * * * <pre> * Required. The measurement protocol secret to update. * </pre> * * <code> * .google.analytics.admin.v1beta.MeasurementProtocolSecret measurement_protocol_secret = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.analytics.admin.v1beta.MeasurementProtocolSecretOrBuilder getMeasurementProtocolSecretOrBuilder() { return measurementProtocolSecret_ == null ? com.google.analytics.admin.v1beta.MeasurementProtocolSecret.getDefaultInstance() : measurementProtocolSecret_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getMeasurementProtocolSecret()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 1, getMeasurementProtocolSecret()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest)) { return super.equals(obj); } com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest other = (com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest) obj; if (hasMeasurementProtocolSecret() != other.hasMeasurementProtocolSecret()) return false; if (hasMeasurementProtocolSecret()) { if (!getMeasurementProtocolSecret().equals(other.getMeasurementProtocolSecret())) return false; } if 
(hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasMeasurementProtocolSecret()) { hash = (37 * hash) + MEASUREMENT_PROTOCOL_SECRET_FIELD_NUMBER; hash = (53 * hash) + getMeasurementProtocolSecret().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { 
return PARSER.parseFrom(data); } public static com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for UpdateMeasurementProtocolSecret RPC * </pre> * * Protobuf type {@code google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest) com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.analytics.admin.v1beta.AnalyticsAdminProto .internal_static_google_analytics_admin_v1beta_UpdateMeasurementProtocolSecretRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.analytics.admin.v1beta.AnalyticsAdminProto .internal_static_google_analytics_admin_v1beta_UpdateMeasurementProtocolSecretRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest.class, com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest.Builder .class); } // Construct using // 
com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getMeasurementProtocolSecretFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; measurementProtocolSecret_ = null; if (measurementProtocolSecretBuilder_ != null) { measurementProtocolSecretBuilder_.dispose(); measurementProtocolSecretBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.analytics.admin.v1beta.AnalyticsAdminProto .internal_static_google_analytics_admin_v1beta_UpdateMeasurementProtocolSecretRequest_descriptor; } @java.lang.Override public com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest getDefaultInstanceForType() { return com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest .getDefaultInstance(); } @java.lang.Override public com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest build() { com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest buildPartial() { com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest result = new com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; 
} private void buildPartial0( com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.measurementProtocolSecret_ = measurementProtocolSecretBuilder_ == null ? measurementProtocolSecret_ : measurementProtocolSecretBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest) { return mergeFrom( (com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest other) { if (other == 
com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest .getDefaultInstance()) return this; if (other.hasMeasurementProtocolSecret()) { mergeMeasurementProtocolSecret(other.getMeasurementProtocolSecret()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getMeasurementProtocolSecretFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.analytics.admin.v1beta.MeasurementProtocolSecret measurementProtocolSecret_; private com.google.protobuf.SingleFieldBuilderV3< com.google.analytics.admin.v1beta.MeasurementProtocolSecret, com.google.analytics.admin.v1beta.MeasurementProtocolSecret.Builder, com.google.analytics.admin.v1beta.MeasurementProtocolSecretOrBuilder> measurementProtocolSecretBuilder_; /** * * * <pre> * Required. The measurement protocol secret to update. 
* </pre> * * <code> * .google.analytics.admin.v1beta.MeasurementProtocolSecret measurement_protocol_secret = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the measurementProtocolSecret field is set. */ public boolean hasMeasurementProtocolSecret() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The measurement protocol secret to update. * </pre> * * <code> * .google.analytics.admin.v1beta.MeasurementProtocolSecret measurement_protocol_secret = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The measurementProtocolSecret. */ public com.google.analytics.admin.v1beta.MeasurementProtocolSecret getMeasurementProtocolSecret() { if (measurementProtocolSecretBuilder_ == null) { return measurementProtocolSecret_ == null ? com.google.analytics.admin.v1beta.MeasurementProtocolSecret.getDefaultInstance() : measurementProtocolSecret_; } else { return measurementProtocolSecretBuilder_.getMessage(); } } /** * * * <pre> * Required. The measurement protocol secret to update. * </pre> * * <code> * .google.analytics.admin.v1beta.MeasurementProtocolSecret measurement_protocol_secret = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setMeasurementProtocolSecret( com.google.analytics.admin.v1beta.MeasurementProtocolSecret value) { if (measurementProtocolSecretBuilder_ == null) { if (value == null) { throw new NullPointerException(); } measurementProtocolSecret_ = value; } else { measurementProtocolSecretBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The measurement protocol secret to update. 
* </pre> * * <code> * .google.analytics.admin.v1beta.MeasurementProtocolSecret measurement_protocol_secret = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setMeasurementProtocolSecret( com.google.analytics.admin.v1beta.MeasurementProtocolSecret.Builder builderForValue) { if (measurementProtocolSecretBuilder_ == null) { measurementProtocolSecret_ = builderForValue.build(); } else { measurementProtocolSecretBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The measurement protocol secret to update. * </pre> * * <code> * .google.analytics.admin.v1beta.MeasurementProtocolSecret measurement_protocol_secret = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeMeasurementProtocolSecret( com.google.analytics.admin.v1beta.MeasurementProtocolSecret value) { if (measurementProtocolSecretBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && measurementProtocolSecret_ != null && measurementProtocolSecret_ != com.google.analytics.admin.v1beta.MeasurementProtocolSecret .getDefaultInstance()) { getMeasurementProtocolSecretBuilder().mergeFrom(value); } else { measurementProtocolSecret_ = value; } } else { measurementProtocolSecretBuilder_.mergeFrom(value); } if (measurementProtocolSecret_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The measurement protocol secret to update. * </pre> * * <code> * .google.analytics.admin.v1beta.MeasurementProtocolSecret measurement_protocol_secret = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearMeasurementProtocolSecret() { bitField0_ = (bitField0_ & ~0x00000001); measurementProtocolSecret_ = null; if (measurementProtocolSecretBuilder_ != null) { measurementProtocolSecretBuilder_.dispose(); measurementProtocolSecretBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The measurement protocol secret to update. 
* </pre> * * <code> * .google.analytics.admin.v1beta.MeasurementProtocolSecret measurement_protocol_secret = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.analytics.admin.v1beta.MeasurementProtocolSecret.Builder getMeasurementProtocolSecretBuilder() { bitField0_ |= 0x00000001; onChanged(); return getMeasurementProtocolSecretFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The measurement protocol secret to update. * </pre> * * <code> * .google.analytics.admin.v1beta.MeasurementProtocolSecret measurement_protocol_secret = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.analytics.admin.v1beta.MeasurementProtocolSecretOrBuilder getMeasurementProtocolSecretOrBuilder() { if (measurementProtocolSecretBuilder_ != null) { return measurementProtocolSecretBuilder_.getMessageOrBuilder(); } else { return measurementProtocolSecret_ == null ? com.google.analytics.admin.v1beta.MeasurementProtocolSecret.getDefaultInstance() : measurementProtocolSecret_; } } /** * * * <pre> * Required. The measurement protocol secret to update. 
* </pre> * * <code> * .google.analytics.admin.v1beta.MeasurementProtocolSecret measurement_protocol_secret = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.analytics.admin.v1beta.MeasurementProtocolSecret, com.google.analytics.admin.v1beta.MeasurementProtocolSecret.Builder, com.google.analytics.admin.v1beta.MeasurementProtocolSecretOrBuilder> getMeasurementProtocolSecretFieldBuilder() { if (measurementProtocolSecretBuilder_ == null) { measurementProtocolSecretBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.analytics.admin.v1beta.MeasurementProtocolSecret, com.google.analytics.admin.v1beta.MeasurementProtocolSecret.Builder, com.google.analytics.admin.v1beta.MeasurementProtocolSecretOrBuilder>( getMeasurementProtocolSecret(), getParentForChildren(), isClean()); measurementProtocolSecret_ = null; } return measurementProtocolSecretBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest) } // @@protoc_insertion_point(class_scope:google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest) private static final com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest(); } public static com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateMeasurementProtocolSecretRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateMeasurementProtocolSecretRequest>() { @java.lang.Override public UpdateMeasurementProtocolSecretRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = 
newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateMeasurementProtocolSecretRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateMeasurementProtocolSecretRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.analytics.admin.v1beta.UpdateMeasurementProtocolSecretRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/incubator-kie-optaplanner
37,470
core/optaplanner-core-impl/src/main/java/org/optaplanner/core/impl/heuristic/selector/value/ValueSelectorFactory.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.optaplanner.core.impl.heuristic.selector.value; import java.util.ArrayList; import java.util.Comparator; import java.util.List; import org.optaplanner.core.api.domain.solution.PlanningSolution; import org.optaplanner.core.api.domain.valuerange.ValueRangeProvider; import org.optaplanner.core.config.heuristic.selector.common.SelectionCacheType; import org.optaplanner.core.config.heuristic.selector.common.SelectionOrder; import org.optaplanner.core.config.heuristic.selector.common.decorator.SelectionSorterOrder; import org.optaplanner.core.config.heuristic.selector.common.nearby.NearbySelectionConfig; import org.optaplanner.core.config.heuristic.selector.value.ValueSelectorConfig; import org.optaplanner.core.impl.domain.entity.descriptor.EntityDescriptor; import org.optaplanner.core.impl.domain.solution.descriptor.SolutionDescriptor; import org.optaplanner.core.impl.domain.valuerange.descriptor.EntityIndependentValueRangeDescriptor; import org.optaplanner.core.impl.domain.valuerange.descriptor.ValueRangeDescriptor; import org.optaplanner.core.impl.domain.variable.descriptor.GenuineVariableDescriptor; import org.optaplanner.core.impl.heuristic.HeuristicConfigPolicy; import 
org.optaplanner.core.impl.heuristic.selector.AbstractSelectorFactory; import org.optaplanner.core.impl.heuristic.selector.common.decorator.ComparatorSelectionSorter; import org.optaplanner.core.impl.heuristic.selector.common.decorator.SelectionFilter; import org.optaplanner.core.impl.heuristic.selector.common.decorator.SelectionProbabilityWeightFactory; import org.optaplanner.core.impl.heuristic.selector.common.decorator.SelectionSorter; import org.optaplanner.core.impl.heuristic.selector.common.decorator.SelectionSorterWeightFactory; import org.optaplanner.core.impl.heuristic.selector.common.decorator.WeightFactorySelectionSorter; import org.optaplanner.core.impl.heuristic.selector.common.nearby.NearbyDistanceMeter; import org.optaplanner.core.impl.heuristic.selector.common.nearby.NearbyRandom; import org.optaplanner.core.impl.heuristic.selector.common.nearby.NearbyRandomFactory; import org.optaplanner.core.impl.heuristic.selector.entity.EntitySelector; import org.optaplanner.core.impl.heuristic.selector.entity.EntitySelectorFactory; import org.optaplanner.core.impl.heuristic.selector.value.decorator.AssignedValueSelector; import org.optaplanner.core.impl.heuristic.selector.value.decorator.CachingValueSelector; import org.optaplanner.core.impl.heuristic.selector.value.decorator.DowncastingValueSelector; import org.optaplanner.core.impl.heuristic.selector.value.decorator.EntityDependentSortingValueSelector; import org.optaplanner.core.impl.heuristic.selector.value.decorator.FilteringValueSelector; import org.optaplanner.core.impl.heuristic.selector.value.decorator.InitializedValueSelector; import org.optaplanner.core.impl.heuristic.selector.value.decorator.ProbabilityValueSelector; import org.optaplanner.core.impl.heuristic.selector.value.decorator.ReinitializeVariableValueSelector; import org.optaplanner.core.impl.heuristic.selector.value.decorator.SelectedCountLimitValueSelector; import 
org.optaplanner.core.impl.heuristic.selector.value.decorator.ShufflingValueSelector; import org.optaplanner.core.impl.heuristic.selector.value.decorator.SortingValueSelector; import org.optaplanner.core.impl.heuristic.selector.value.decorator.UnassignedValueSelector; import org.optaplanner.core.impl.heuristic.selector.value.mimic.MimicRecordingValueSelector; import org.optaplanner.core.impl.heuristic.selector.value.mimic.MimicReplayingValueSelector; import org.optaplanner.core.impl.heuristic.selector.value.mimic.ValueMimicRecorder; import org.optaplanner.core.impl.heuristic.selector.value.nearby.NearEntityNearbyValueSelector; import org.optaplanner.core.impl.heuristic.selector.value.nearby.NearValueNearbyValueSelector; import org.optaplanner.core.impl.solver.ClassInstanceCache; public class ValueSelectorFactory<Solution_> extends AbstractSelectorFactory<Solution_, ValueSelectorConfig> { public static <Solution_> ValueSelectorFactory<Solution_> create(ValueSelectorConfig valueSelectorConfig) { return new ValueSelectorFactory<>(valueSelectorConfig); } public ValueSelectorFactory(ValueSelectorConfig valueSelectorConfig) { super(valueSelectorConfig); } public GenuineVariableDescriptor<Solution_> extractVariableDescriptor(HeuristicConfigPolicy<Solution_> configPolicy, EntityDescriptor<Solution_> entityDescriptor) { String variableName = config.getVariableName(); if (variableName != null) { return getVariableDescriptorForName(downcastEntityDescriptor(configPolicy, entityDescriptor), variableName); } else if (config.getMimicSelectorRef() != null) { return configPolicy.getValueMimicRecorder(config.getMimicSelectorRef()).getVariableDescriptor(); } else { return null; } } /** * @param configPolicy never null * @param entityDescriptor never null * @param minimumCacheType never null, If caching is used (different from {@link SelectionCacheType#JUST_IN_TIME}), * then it should be at least this {@link SelectionCacheType} because an ancestor already uses such caching * and less 
would be pointless. * @param inheritedSelectionOrder never null * @return never null */ public ValueSelector<Solution_> buildValueSelector(HeuristicConfigPolicy<Solution_> configPolicy, EntityDescriptor<Solution_> entityDescriptor, SelectionCacheType minimumCacheType, SelectionOrder inheritedSelectionOrder) { return buildValueSelector(configPolicy, entityDescriptor, minimumCacheType, inheritedSelectionOrder, configPolicy.isReinitializeVariableFilterEnabled(), ListValueFilteringType.NONE); } public ValueSelector<Solution_> buildValueSelector(HeuristicConfigPolicy<Solution_> configPolicy, EntityDescriptor<Solution_> entityDescriptor, SelectionCacheType minimumCacheType, SelectionOrder inheritedSelectionOrder, boolean applyReinitializeVariableFiltering, ListValueFilteringType listValueFilteringType) { GenuineVariableDescriptor<Solution_> variableDescriptor = deduceGenuineVariableDescriptor( downcastEntityDescriptor(configPolicy, entityDescriptor), config.getVariableName()); if (config.getMimicSelectorRef() != null) { ValueSelector<Solution_> valueSelector = buildMimicReplaying(configPolicy); valueSelector = applyReinitializeVariableFiltering(applyReinitializeVariableFiltering, variableDescriptor, valueSelector); valueSelector = applyDowncasting(valueSelector); return valueSelector; } SelectionCacheType resolvedCacheType = SelectionCacheType.resolve(config.getCacheType(), minimumCacheType); SelectionOrder resolvedSelectionOrder = SelectionOrder.resolve(config.getSelectionOrder(), inheritedSelectionOrder); if (config.getNearbySelectionConfig() != null) { config.getNearbySelectionConfig().validateNearby(resolvedCacheType, resolvedSelectionOrder); } validateCacheTypeVersusSelectionOrder(resolvedCacheType, resolvedSelectionOrder); validateSorting(resolvedSelectionOrder); validateProbability(resolvedSelectionOrder); validateSelectedLimit(minimumCacheType); // baseValueSelector and lower should be SelectionOrder.ORIGINAL if they are going to get cached completely 
ValueSelector<Solution_> valueSelector = buildBaseValueSelector(variableDescriptor, SelectionCacheType.max(minimumCacheType, resolvedCacheType), determineBaseRandomSelection(variableDescriptor, resolvedCacheType, resolvedSelectionOrder)); if (config.getNearbySelectionConfig() != null) { // TODO Static filtering (such as movableEntitySelectionFilter) should affect nearbySelection too valueSelector = applyNearbySelection(configPolicy, entityDescriptor, minimumCacheType, resolvedSelectionOrder, valueSelector); } ClassInstanceCache instanceCache = configPolicy.getClassInstanceCache(); valueSelector = applyFiltering(valueSelector, instanceCache); valueSelector = applyInitializedChainedValueFilter(configPolicy, variableDescriptor, valueSelector); valueSelector = applySorting(resolvedCacheType, resolvedSelectionOrder, valueSelector, instanceCache); valueSelector = applyProbability(resolvedCacheType, resolvedSelectionOrder, valueSelector, instanceCache); valueSelector = applyShuffling(resolvedCacheType, resolvedSelectionOrder, valueSelector); valueSelector = applyCaching(resolvedCacheType, resolvedSelectionOrder, valueSelector); valueSelector = applySelectedLimit(valueSelector); valueSelector = applyListValueFiltering(configPolicy, listValueFilteringType, variableDescriptor, valueSelector); valueSelector = applyMimicRecording(configPolicy, valueSelector); valueSelector = applyReinitializeVariableFiltering(applyReinitializeVariableFiltering, variableDescriptor, valueSelector); valueSelector = applyDowncasting(valueSelector); return valueSelector; } protected ValueSelector<Solution_> buildMimicReplaying(HeuristicConfigPolicy<Solution_> configPolicy) { if (config.getId() != null || config.getVariableName() != null || config.getCacheType() != null || config.getSelectionOrder() != null || config.getNearbySelectionConfig() != null || config.getFilterClass() != null || config.getSorterManner() != null || config.getSorterComparatorClass() != null || 
config.getSorterWeightFactoryClass() != null || config.getSorterOrder() != null || config.getSorterClass() != null || config.getProbabilityWeightFactoryClass() != null || config.getSelectedCountLimit() != null) { throw new IllegalArgumentException("The valueSelectorConfig (" + config + ") with mimicSelectorRef (" + config.getMimicSelectorRef() + ") has another property that is not null."); } ValueMimicRecorder<Solution_> valueMimicRecorder = configPolicy.getValueMimicRecorder(config.getMimicSelectorRef()); if (valueMimicRecorder == null) { throw new IllegalArgumentException("The valueSelectorConfig (" + config + ") has a mimicSelectorRef (" + config.getMimicSelectorRef() + ") for which no valueSelector with that id exists (in its solver phase)."); } return new MimicReplayingValueSelector<>(valueMimicRecorder); } protected EntityDescriptor<Solution_> downcastEntityDescriptor(HeuristicConfigPolicy<Solution_> configPolicy, EntityDescriptor<Solution_> entityDescriptor) { if (config.getDowncastEntityClass() != null) { Class<?> parentEntityClass = entityDescriptor.getEntityClass(); if (!parentEntityClass.isAssignableFrom(config.getDowncastEntityClass())) { throw new IllegalStateException("The downcastEntityClass (" + config.getDowncastEntityClass() + ") is not a subclass of the parentEntityClass (" + parentEntityClass + ") configured by the " + EntitySelector.class.getSimpleName() + "."); } SolutionDescriptor<Solution_> solutionDescriptor = configPolicy.getSolutionDescriptor(); entityDescriptor = solutionDescriptor.getEntityDescriptorStrict(config.getDowncastEntityClass()); if (entityDescriptor == null) { throw new IllegalArgumentException("The selectorConfig (" + config + ") has an downcastEntityClass (" + config.getDowncastEntityClass() + ") that is not a known planning entity.\n" + "Check your solver configuration. 
If that class (" + config.getDowncastEntityClass().getSimpleName() + ") is not in the entityClassSet (" + solutionDescriptor.getEntityClassSet() + "), check your @" + PlanningSolution.class.getSimpleName() + " implementation's annotated methods too."); } } return entityDescriptor; } protected boolean determineBaseRandomSelection(GenuineVariableDescriptor<Solution_> variableDescriptor, SelectionCacheType resolvedCacheType, SelectionOrder resolvedSelectionOrder) { switch (resolvedSelectionOrder) { case ORIGINAL: return false; case SORTED: case SHUFFLED: case PROBABILISTIC: // baseValueSelector and lower should be ORIGINAL if they are going to get cached completely return false; case RANDOM: // Predict if caching will occur return resolvedCacheType.isNotCached() || (isBaseInherentlyCached(variableDescriptor) && !hasFiltering(variableDescriptor)); default: throw new IllegalStateException("The selectionOrder (" + resolvedSelectionOrder + ") is not implemented."); } } protected boolean isBaseInherentlyCached(GenuineVariableDescriptor<Solution_> variableDescriptor) { return variableDescriptor.isValueRangeEntityIndependent(); } private ValueSelector<Solution_> buildBaseValueSelector(GenuineVariableDescriptor<Solution_> variableDescriptor, SelectionCacheType minimumCacheType, boolean randomSelection) { ValueRangeDescriptor<Solution_> valueRangeDescriptor = variableDescriptor.getValueRangeDescriptor(); // TODO minimumCacheType SOLVER is only a problem if the valueRange includes entities or custom weird cloning if (minimumCacheType == SelectionCacheType.SOLVER) { // TODO Solver cached entities are not compatible with DroolsScoreCalculator and IncrementalScoreDirector // because between phases the entities get cloned and the KieSession/Maps contains those clones afterwards // https://issues.redhat.com/browse/PLANNER-54 throw new IllegalArgumentException("The minimumCacheType (" + minimumCacheType + ") is not yet supported. 
Please use " + SelectionCacheType.PHASE + " instead."); } if (valueRangeDescriptor.isEntityIndependent()) { return new FromSolutionPropertyValueSelector<>( (EntityIndependentValueRangeDescriptor<Solution_>) valueRangeDescriptor, minimumCacheType, randomSelection); } else { // TODO Do not allow PHASE cache on FromEntityPropertyValueSelector, except if the moveSelector is PHASE cached too. return new FromEntityPropertyValueSelector<>(valueRangeDescriptor, randomSelection); } } private boolean hasFiltering(GenuineVariableDescriptor<Solution_> variableDescriptor) { return config.getFilterClass() != null || variableDescriptor.hasMovableChainedTrailingValueFilter(); } protected ValueSelector<Solution_> applyFiltering(ValueSelector<Solution_> valueSelector, ClassInstanceCache instanceCache) { GenuineVariableDescriptor<Solution_> variableDescriptor = valueSelector.getVariableDescriptor(); if (hasFiltering(variableDescriptor)) { List<SelectionFilter<Solution_, Object>> filterList = new ArrayList<>(config.getFilterClass() == null ? 
1 : 2); if (config.getFilterClass() != null) { filterList.add(instanceCache.newInstance(config, "filterClass", config.getFilterClass())); } // Filter out pinned entities if (variableDescriptor.hasMovableChainedTrailingValueFilter()) { filterList.add(variableDescriptor.getMovableChainedTrailingValueFilter()); } valueSelector = FilteringValueSelector.create(valueSelector, filterList); } return valueSelector; } protected ValueSelector<Solution_> applyInitializedChainedValueFilter(HeuristicConfigPolicy<Solution_> configPolicy, GenuineVariableDescriptor<Solution_> variableDescriptor, ValueSelector<Solution_> valueSelector) { if (configPolicy.isInitializedChainedValueFilterEnabled() && variableDescriptor.isChained()) { valueSelector = InitializedValueSelector.create(valueSelector); } return valueSelector; } protected void validateSorting(SelectionOrder resolvedSelectionOrder) { if ((config.getSorterManner() != null || config.getSorterComparatorClass() != null || config.getSorterWeightFactoryClass() != null || config.getSorterOrder() != null || config.getSorterClass() != null) && resolvedSelectionOrder != SelectionOrder.SORTED) { throw new IllegalArgumentException("The valueSelectorConfig (" + config + ") with sorterManner (" + config.getSorterManner() + ") and sorterComparatorClass (" + config.getSorterComparatorClass() + ") and sorterWeightFactoryClass (" + config.getSorterWeightFactoryClass() + ") and sorterOrder (" + config.getSorterOrder() + ") and sorterClass (" + config.getSorterClass() + ") has a resolvedSelectionOrder (" + resolvedSelectionOrder + ") that is not " + SelectionOrder.SORTED + "."); } if (config.getSorterManner() != null && config.getSorterComparatorClass() != null) { throw new IllegalArgumentException("The valueSelectorConfig (" + config + ") has both a sorterManner (" + config.getSorterManner() + ") and a sorterComparatorClass (" + config.getSorterComparatorClass() + ")."); } if (config.getSorterManner() != null && 
config.getSorterWeightFactoryClass() != null) { throw new IllegalArgumentException("The valueSelectorConfig (" + config + ") has both a sorterManner (" + config.getSorterManner() + ") and a sorterWeightFactoryClass (" + config.getSorterWeightFactoryClass() + ")."); } if (config.getSorterManner() != null && config.getSorterClass() != null) { throw new IllegalArgumentException("The valueSelectorConfig (" + config + ") has both a sorterManner (" + config.getSorterManner() + ") and a sorterClass (" + config.getSorterClass() + ")."); } if (config.getSorterManner() != null && config.getSorterOrder() != null) { throw new IllegalArgumentException("The valueSelectorConfig (" + config + ") with sorterManner (" + config.getSorterManner() + ") has a non-null sorterOrder (" + config.getSorterOrder() + ")."); } if (config.getSorterComparatorClass() != null && config.getSorterWeightFactoryClass() != null) { throw new IllegalArgumentException("The valueSelectorConfig (" + config + ") has both a sorterComparatorClass (" + config.getSorterComparatorClass() + ") and a sorterWeightFactoryClass (" + config.getSorterWeightFactoryClass() + ")."); } if (config.getSorterComparatorClass() != null && config.getSorterClass() != null) { throw new IllegalArgumentException("The valueSelectorConfig (" + config + ") has both a sorterComparatorClass (" + config.getSorterComparatorClass() + ") and a sorterClass (" + config.getSorterClass() + ")."); } if (config.getSorterWeightFactoryClass() != null && config.getSorterClass() != null) { throw new IllegalArgumentException("The valueSelectorConfig (" + config + ") has both a sorterWeightFactoryClass (" + config.getSorterWeightFactoryClass() + ") and a sorterClass (" + config.getSorterClass() + ")."); } if (config.getSorterClass() != null && config.getSorterOrder() != null) { throw new IllegalArgumentException("The valueSelectorConfig (" + config + ") with sorterClass (" + config.getSorterClass() + ") has a non-null sorterOrder (" + 
config.getSorterOrder() + ")."); } } protected ValueSelector<Solution_> applySorting(SelectionCacheType resolvedCacheType, SelectionOrder resolvedSelectionOrder, ValueSelector<Solution_> valueSelector, ClassInstanceCache instanceCache) { if (resolvedSelectionOrder == SelectionOrder.SORTED) { SelectionSorter<Solution_, Object> sorter; if (config.getSorterManner() != null) { GenuineVariableDescriptor<Solution_> variableDescriptor = valueSelector.getVariableDescriptor(); if (!ValueSelectorConfig.hasSorter(config.getSorterManner(), variableDescriptor)) { return valueSelector; } sorter = ValueSelectorConfig.determineSorter(config.getSorterManner(), variableDescriptor); } else if (config.getSorterComparatorClass() != null) { Comparator<Object> sorterComparator = instanceCache.newInstance(config, "sorterComparatorClass", config.getSorterComparatorClass()); sorter = new ComparatorSelectionSorter<>(sorterComparator, SelectionSorterOrder.resolve(config.getSorterOrder())); } else if (config.getSorterWeightFactoryClass() != null) { SelectionSorterWeightFactory<Solution_, Object> sorterWeightFactory = instanceCache.newInstance(config, "sorterWeightFactoryClass", config.getSorterWeightFactoryClass()); sorter = new WeightFactorySelectionSorter<>(sorterWeightFactory, SelectionSorterOrder.resolve(config.getSorterOrder())); } else if (config.getSorterClass() != null) { sorter = instanceCache.newInstance(config, "sorterClass", config.getSorterClass()); } else { throw new IllegalArgumentException("The valueSelectorConfig (" + config + ") with resolvedSelectionOrder (" + resolvedSelectionOrder + ") needs a sorterManner (" + config.getSorterManner() + ") or a sorterComparatorClass (" + config.getSorterComparatorClass() + ") or a sorterWeightFactoryClass (" + config.getSorterWeightFactoryClass() + ") or a sorterClass (" + config.getSorterClass() + ")."); } if (!valueSelector.getVariableDescriptor().isValueRangeEntityIndependent() && resolvedCacheType == SelectionCacheType.STEP) { 
valueSelector = new EntityDependentSortingValueSelector<>(valueSelector, resolvedCacheType, sorter); } else { if (!(valueSelector instanceof EntityIndependentValueSelector)) { throw new IllegalArgumentException("The valueSelectorConfig (" + config + ") with resolvedCacheType (" + resolvedCacheType + ") and resolvedSelectionOrder (" + resolvedSelectionOrder + ") needs to be based on an " + EntityIndependentValueSelector.class.getSimpleName() + " (" + valueSelector + ")." + " Check your @" + ValueRangeProvider.class.getSimpleName() + " annotations."); } valueSelector = new SortingValueSelector<>((EntityIndependentValueSelector<Solution_>) valueSelector, resolvedCacheType, sorter); } } return valueSelector; } protected void validateProbability(SelectionOrder resolvedSelectionOrder) { if (config.getProbabilityWeightFactoryClass() != null && resolvedSelectionOrder != SelectionOrder.PROBABILISTIC) { throw new IllegalArgumentException("The valueSelectorConfig (" + config + ") with probabilityWeightFactoryClass (" + config.getProbabilityWeightFactoryClass() + ") has a resolvedSelectionOrder (" + resolvedSelectionOrder + ") that is not " + SelectionOrder.PROBABILISTIC + "."); } } protected ValueSelector<Solution_> applyProbability(SelectionCacheType resolvedCacheType, SelectionOrder resolvedSelectionOrder, ValueSelector<Solution_> valueSelector, ClassInstanceCache instanceCache) { if (resolvedSelectionOrder == SelectionOrder.PROBABILISTIC) { if (config.getProbabilityWeightFactoryClass() == null) { throw new IllegalArgumentException("The valueSelectorConfig (" + config + ") with resolvedSelectionOrder (" + resolvedSelectionOrder + ") needs a probabilityWeightFactoryClass (" + config.getProbabilityWeightFactoryClass() + ")."); } SelectionProbabilityWeightFactory<Solution_, Object> probabilityWeightFactory = instanceCache.newInstance(config, "probabilityWeightFactoryClass", config.getProbabilityWeightFactoryClass()); if (!(valueSelector instanceof 
EntityIndependentValueSelector)) { throw new IllegalArgumentException("The valueSelectorConfig (" + config + ") with resolvedCacheType (" + resolvedCacheType + ") and resolvedSelectionOrder (" + resolvedSelectionOrder + ") needs to be based on an " + EntityIndependentValueSelector.class.getSimpleName() + " (" + valueSelector + ")." + " Check your @" + ValueRangeProvider.class.getSimpleName() + " annotations."); } valueSelector = new ProbabilityValueSelector<>((EntityIndependentValueSelector<Solution_>) valueSelector, resolvedCacheType, probabilityWeightFactory); } return valueSelector; } private ValueSelector<Solution_> applyShuffling(SelectionCacheType resolvedCacheType, SelectionOrder resolvedSelectionOrder, ValueSelector<Solution_> valueSelector) { if (resolvedSelectionOrder == SelectionOrder.SHUFFLED) { if (!(valueSelector instanceof EntityIndependentValueSelector)) { throw new IllegalArgumentException("The valueSelectorConfig (" + config + ") with resolvedCacheType (" + resolvedCacheType + ") and resolvedSelectionOrder (" + resolvedSelectionOrder + ") needs to be based on an " + EntityIndependentValueSelector.class.getSimpleName() + " (" + valueSelector + ")." 
+ " Check your @" + ValueRangeProvider.class.getSimpleName() + " annotations."); } valueSelector = new ShufflingValueSelector<>((EntityIndependentValueSelector<Solution_>) valueSelector, resolvedCacheType); } return valueSelector; } private ValueSelector<Solution_> applyCaching(SelectionCacheType resolvedCacheType, SelectionOrder resolvedSelectionOrder, ValueSelector<Solution_> valueSelector) { if (resolvedCacheType.isCached() && resolvedCacheType.compareTo(valueSelector.getCacheType()) > 0) { if (!(valueSelector instanceof EntityIndependentValueSelector)) { throw new IllegalArgumentException("The valueSelectorConfig (" + config + ") with resolvedCacheType (" + resolvedCacheType + ") and resolvedSelectionOrder (" + resolvedSelectionOrder + ") needs to be based on an " + EntityIndependentValueSelector.class.getSimpleName() + " (" + valueSelector + ")." + " Check your @" + ValueRangeProvider.class.getSimpleName() + " annotations."); } valueSelector = new CachingValueSelector<>((EntityIndependentValueSelector<Solution_>) valueSelector, resolvedCacheType, resolvedSelectionOrder.toRandomSelectionBoolean()); } return valueSelector; } private void validateSelectedLimit(SelectionCacheType minimumCacheType) { if (config.getSelectedCountLimit() != null && minimumCacheType.compareTo(SelectionCacheType.JUST_IN_TIME) > 0) { throw new IllegalArgumentException("The valueSelectorConfig (" + config + ") with selectedCountLimit (" + config.getSelectedCountLimit() + ") has a minimumCacheType (" + minimumCacheType + ") that is higher than " + SelectionCacheType.JUST_IN_TIME + "."); } } private ValueSelector<Solution_> applySelectedLimit(ValueSelector<Solution_> valueSelector) { if (config.getSelectedCountLimit() != null) { valueSelector = new SelectedCountLimitValueSelector<>(valueSelector, config.getSelectedCountLimit()); } return valueSelector; } private ValueSelector<Solution_> applyNearbySelection(HeuristicConfigPolicy<Solution_> configPolicy, EntityDescriptor<Solution_> 
entityDescriptor, SelectionCacheType minimumCacheType, SelectionOrder resolvedSelectionOrder, ValueSelector<Solution_> valueSelector) { NearbySelectionConfig nearbySelectionConfig = config.getNearbySelectionConfig(); boolean randomSelection = resolvedSelectionOrder.toRandomSelectionBoolean(); NearbyDistanceMeter<?, ?> nearbyDistanceMeter = configPolicy.getClassInstanceCache().newInstance(nearbySelectionConfig, "nearbyDistanceMeterClass", nearbySelectionConfig.getNearbyDistanceMeterClass()); // TODO Check nearbyDistanceMeterClass.getGenericInterfaces() to confirm generic type S is an entityClass NearbyRandom nearbyRandom = NearbyRandomFactory.create(nearbySelectionConfig).buildNearbyRandom(randomSelection); if (nearbySelectionConfig.getOriginEntitySelectorConfig() != null) { EntitySelector<Solution_> originEntitySelector = EntitySelectorFactory .<Solution_> create(nearbySelectionConfig.getOriginEntitySelectorConfig()) .buildEntitySelector(configPolicy, minimumCacheType, resolvedSelectionOrder); return new NearEntityNearbyValueSelector<>(valueSelector, originEntitySelector, nearbyDistanceMeter, nearbyRandom, randomSelection); } else if (nearbySelectionConfig.getOriginValueSelectorConfig() != null) { ValueSelector<Solution_> originValueSelector = ValueSelectorFactory .<Solution_> create(nearbySelectionConfig.getOriginValueSelectorConfig()) .buildValueSelector(configPolicy, entityDescriptor, minimumCacheType, resolvedSelectionOrder); if (!(valueSelector instanceof EntityIndependentValueSelector)) { throw new IllegalArgumentException( "The valueSelectorConfig (" + config + ") needs to be based on an " + EntityIndependentValueSelector.class.getSimpleName() + " (" + valueSelector + ")." 
+ " Check your @" + ValueRangeProvider.class.getSimpleName() + " annotations."); } if (!(originValueSelector instanceof EntityIndependentValueSelector)) { throw new IllegalArgumentException( "The originValueSelectorConfig (" + nearbySelectionConfig.getOriginValueSelectorConfig() + ") needs to be based on an " + EntityIndependentValueSelector.class.getSimpleName() + " (" + originValueSelector + ")." + " Check your @" + ValueRangeProvider.class.getSimpleName() + " annotations."); } return new NearValueNearbyValueSelector<>( (EntityIndependentValueSelector<Solution_>) valueSelector, (EntityIndependentValueSelector<Solution_>) originValueSelector, nearbyDistanceMeter, nearbyRandom, randomSelection); } else { throw new IllegalArgumentException("The valueSelector (" + config + ")'s nearbySelectionConfig (" + nearbySelectionConfig + ") requires an originEntitySelector or an originValueSelector."); } } private ValueSelector<Solution_> applyMimicRecording(HeuristicConfigPolicy<Solution_> configPolicy, ValueSelector<Solution_> valueSelector) { if (config.getId() != null) { if (config.getId().isEmpty()) { throw new IllegalArgumentException("The valueSelectorConfig (" + config + ") has an empty id (" + config.getId() + ")."); } if (!(valueSelector instanceof EntityIndependentValueSelector)) { throw new IllegalArgumentException("The valueSelectorConfig (" + config + ") with id (" + config.getId() + ") needs to be based on an " + EntityIndependentValueSelector.class.getSimpleName() + " (" + valueSelector + ")." 
+ " Check your @" + ValueRangeProvider.class.getSimpleName() + " annotations."); } MimicRecordingValueSelector<Solution_> mimicRecordingValueSelector = new MimicRecordingValueSelector<>( (EntityIndependentValueSelector<Solution_>) valueSelector); configPolicy.addValueMimicRecorder(config.getId(), mimicRecordingValueSelector); valueSelector = mimicRecordingValueSelector; } return valueSelector; } ValueSelector<Solution_> applyListValueFiltering(HeuristicConfigPolicy<?> configPolicy, ListValueFilteringType listValueFilteringType, GenuineVariableDescriptor<Solution_> variableDescriptor, ValueSelector<Solution_> valueSelector) { if (variableDescriptor.isListVariable() && configPolicy.isUnassignedValuesAllowed() && listValueFilteringType != ListValueFilteringType.NONE) { if (!(valueSelector instanceof EntityIndependentValueSelector)) { throw new IllegalArgumentException("The valueSelectorConfig (" + config + ") with id (" + config.getId() + ") needs to be based on an " + EntityIndependentValueSelector.class.getSimpleName() + " (" + valueSelector + ")." + " Check your @" + ValueRangeProvider.class.getSimpleName() + " annotations."); } valueSelector = listValueFilteringType == ListValueFilteringType.ACCEPT_ASSIGNED ? 
new AssignedValueSelector<>(((EntityIndependentValueSelector<Solution_>) valueSelector)) : new UnassignedValueSelector<>(((EntityIndependentValueSelector<Solution_>) valueSelector)); } return valueSelector; } private ValueSelector<Solution_> applyReinitializeVariableFiltering(boolean applyReinitializeVariableFiltering, GenuineVariableDescriptor<Solution_> variableDescriptor, ValueSelector<Solution_> valueSelector) { if (applyReinitializeVariableFiltering && !variableDescriptor.isListVariable()) { valueSelector = new ReinitializeVariableValueSelector<>(valueSelector); } return valueSelector; } private ValueSelector<Solution_> applyDowncasting(ValueSelector<Solution_> valueSelector) { if (config.getDowncastEntityClass() != null) { valueSelector = new DowncastingValueSelector<>(valueSelector, config.getDowncastEntityClass()); } return valueSelector; } public enum ListValueFilteringType { NONE, ACCEPT_ASSIGNED, ACCEPT_UNASSIGNED, } }
apache/lucene
37,157
lucene/highlighter/src/test/org/apache/lucene/search/matchhighlight/TestMatchHighlighter.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.lucene.search.matchhighlight; import com.carrotsearch.randomizedtesting.RandomizedTest; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.function.Function; import java.util.stream.Stream; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.LowerCaseFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.core.WhitespaceTokenizer; import org.apache.lucene.analysis.en.PorterStemFilter; import org.apache.lucene.analysis.miscellaneous.PerFieldAnalyzerWrapper; import org.apache.lucene.analysis.standard.StandardTokenizer; import org.apache.lucene.analysis.synonym.SynonymGraphFilter; import org.apache.lucene.analysis.synonym.SynonymMap; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; import org.apache.lucene.document.TextField; import org.apache.lucene.index.IndexOptions; import 
org.apache.lucene.index.IndexableField; import org.apache.lucene.index.Term; import org.apache.lucene.queries.intervals.IntervalQuery; import org.apache.lucene.queries.intervals.Intervals; import org.apache.lucene.queryparser.flexible.standard.StandardQueryParser; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.Sort; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.util.CharsRef; import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers; import org.junit.Before; import org.junit.Test; public class TestMatchHighlighter extends LuceneTestCase { private static final String FLD_ID = "id"; private static final String FLD_TEXT1 = "text1"; private static final String FLD_TEXT2 = "text2"; private FieldType TYPE_TEXT_POSITIONS_OFFSETS; private FieldType TYPE_TEXT_POSITIONS; private PerFieldAnalyzerWrapper analyzer; @Before public void setup() throws IOException { TYPE_TEXT_POSITIONS = TextField.TYPE_STORED; TYPE_TEXT_POSITIONS_OFFSETS = new FieldType(TextField.TYPE_STORED); TYPE_TEXT_POSITIONS_OFFSETS.setIndexOptions( IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS); TYPE_TEXT_POSITIONS_OFFSETS.freeze(); Map<String, Analyzer> fieldAnalyzers = new HashMap<>(); // Create an analyzer with some synonyms, just to showcase them. SynonymMap synonymMap = buildSynonymMap( new String[][] { {"moon\u0000shine", "firewater"}, {"firewater", "moon\u0000shine"}, }); // Make a non-empty offset gap so that break iterator doesn't go haywire on multivalues // glued together. 
final int offsetGap = RandomizedTest.randomIntBetween(1, 2); final int positionGap = RandomizedTest.randomFrom(new int[] {0, 1, 100}); Analyzer synonymsAnalyzer = new AnalyzerWithGaps( offsetGap, positionGap, new Analyzer() { @Override protected TokenStreamComponents createComponents(String fieldName) { Tokenizer tokenizer = new WhitespaceTokenizer(); TokenStream tokenStream = new SynonymGraphFilter(tokenizer, synonymMap, true); return new TokenStreamComponents(tokenizer, tokenStream); } }); fieldAnalyzers.put(FLD_TEXT1, synonymsAnalyzer); fieldAnalyzers.put(FLD_TEXT2, synonymsAnalyzer); analyzer = new PerFieldAnalyzerWrapper(new MissingAnalyzer(), fieldAnalyzers); } static SynonymMap buildSynonymMap(String[][] synonyms) throws IOException { SynonymMap.Builder builder = new SynonymMap.Builder(); for (String[] pair : synonyms) { MatcherAssert.assertThat(pair.length, Matchers.equalTo(2)); builder.add(new CharsRef(pair[0]), new CharsRef(pair[1]), true); } return builder.build(); } @Test public void testBasicUsage() throws Exception { new IndexBuilder(this::toField) .doc(FLD_TEXT1, "foo bar baz") .doc(FLD_TEXT1, "bar foo baz") .doc( fields -> { fields.add(FLD_TEXT1, "Very long content but not matching anything."); fields.add(FLD_TEXT2, "no foo but bar"); }) .build( analyzer, reader -> { Query query = new BooleanQuery.Builder() .add(new TermQuery(new Term(FLD_TEXT1, "foo")), BooleanClause.Occur.SHOULD) .add(new TermQuery(new Term(FLD_TEXT2, "bar")), BooleanClause.Occur.SHOULD) .build(); // In the most basic scenario, we run a search against a query, retrieve // top docs... IndexSearcher searcher = new IndexSearcher(reader); Sort sortOrder = Sort.INDEXORDER; // So that results are consistently ordered. TopDocs topDocs = searcher.search(query, 10, sortOrder); // ...and would want a fixed set of fields from those documents, some of them // possibly highlighted if they matched the query. 
// // This configures the highlighter so that the FLD_ID field is always returned // verbatim, // and FLD_TEXT1 is returned *only if it contained a query match*. MatchHighlighter highlighter = new MatchHighlighter(searcher, analyzer) .appendFieldHighlighter(FieldValueHighlighters.verbatimValue(FLD_ID)) .appendFieldHighlighter( FieldValueHighlighters.highlighted( 80 * 3, 1, new PassageFormatter("...", ">", "<"), FLD_TEXT1::equals)) .appendFieldHighlighter(FieldValueHighlighters.skipRemaining()); // Note document field highlights are a stream over documents in topDocs. In the // remaining code we will just // collect them on the fly into a preformatted string. Stream<MatchHighlighter.DocHighlights> highlights = highlighter.highlight(topDocs, query); assertHighlights( toDocList(highlights), " 0. id: 0", " text1: >foo< bar baz", " 1. id: 1", " text1: bar >foo< baz", " 2. id: 2"); // In a more realistic use case, you'd want to show the value of a given field // *regardless* of whether it // contained a highlight or not -- it is odd that document "id: 2" above doesn't have // the 'text1' field // shown because that field wasn't part of the query match. // // Let's say the field is also potentially long; if it contains a match, // we would want to display the contextual snippet surrounding that match. If it does // not contain any // matches, we would want to display its content up to a given number of characters // (lead lines). // // Let's do this by adding an appropriate field highlighter on FLD_TEXT1. 
highlighter = new MatchHighlighter(searcher, analyzer) .appendFieldHighlighter(FieldValueHighlighters.verbatimValue(FLD_ID)) .appendFieldHighlighter( FieldValueHighlighters.highlighted( 80 * 3, 1, new PassageFormatter("...", ">", "<"), FLD_TEXT1::equals)) .appendFieldHighlighter( FieldValueHighlighters.maxLeadingCharacters(10, "...", Set.of(FLD_TEXT1))) .appendFieldHighlighter(FieldValueHighlighters.skipRemaining()); assertHighlights( toDocList(highlighter.highlight(topDocs, query)), " 0. id: 0", " text1: >foo< bar baz", " 1. id: 1", " text1: bar >foo< baz", " 2. id: 2", " text1: Very long..."); // Field highlighters can apply to multiple fields and be chained for convenience. // For example, this defines a combined highlighter over both FLD_TEXT1 and FLD_TEXT2. Set<String> fields = Set.of(FLD_TEXT1, FLD_TEXT2); MatchHighlighter.FieldValueHighlighter highlightedOrAbbreviated = FieldValueHighlighters.highlighted( 80 * 3, 1, new PassageFormatter("...", ">", "<"), fields::contains) .or(FieldValueHighlighters.maxLeadingCharacters(10, "...", fields)); highlighter = new MatchHighlighter(searcher, analyzer) .appendFieldHighlighter(FieldValueHighlighters.verbatimValue(FLD_ID)) .appendFieldHighlighter(highlightedOrAbbreviated) .appendFieldHighlighter(FieldValueHighlighters.skipRemaining()); assertHighlights( toDocList(highlighter.highlight(topDocs, query)), " 0. id: 0", " text1: >foo< bar baz", " 1. id: 1", " text1: bar >foo< baz", " 2. id: 2", " text1: Very long...", " text2: no foo but >bar<"); }); } @Test public void testSynonymHighlight() throws Exception { // There is nothing special needed to highlight or process complex queries, synonyms, etc. // Synonyms defined in the constructor of this class. new IndexBuilder(this::toField) .doc(FLD_TEXT1, "Where the moon shine falls, firewater flows.") .build( analyzer, reader -> { IndexSearcher searcher = new IndexSearcher(reader); Sort sortOrder = Sort.INDEXORDER; // So that results are consistently ordered. 
MatchHighlighter highlighter = new MatchHighlighter(searcher, analyzer) .appendFieldHighlighter( FieldValueHighlighters.highlighted( 80 * 3, 1, new PassageFormatter("...", ">", "<"), FLD_TEXT1::equals)) .appendFieldHighlighter(FieldValueHighlighters.skipRemaining()); Query query = new TermQuery(new Term(FLD_TEXT1, "firewater")); assertHighlights( toDocList(highlighter.highlight(searcher.search(query, 10, sortOrder), query)), "0. text1: Where the >moon shine< falls, >firewater< flows."); query = new PhraseQuery(FLD_TEXT1, "moon", "shine"); assertHighlights( toDocList(highlighter.highlight(searcher.search(query, 10, sortOrder), query)), "0. text1: Where the >moon shine< falls, >firewater< flows."); }); } @Test public void testAnalyzedTextIntervals() throws Exception { SynonymMap synonymMap = buildSynonymMap( new String[][] { {"moon\u0000shine", "firewater"}, {"firewater", "moon\u0000shine"}, }); Analyzer analyzer = new Analyzer() { @Override protected TokenStreamComponents createComponents(String fieldName) { Tokenizer tokenizer = new StandardTokenizer(); TokenStream ts = tokenizer; ts = new LowerCaseFilter(ts); ts = new SynonymGraphFilter(ts, synonymMap, true); ts = new PorterStemFilter(ts); return new TokenStreamComponents(tokenizer, ts); } }; new IndexBuilder(this::toField) .doc(FLD_TEXT1, "Where the moon shine falls, firewater flows.") .build( analyzer, reader -> { IndexSearcher searcher = new IndexSearcher(reader); Sort sortOrder = Sort.INDEXORDER; // So that results are consistently ordered. MatchHighlighter highlighter = new MatchHighlighter(searcher, analyzer) .appendFieldHighlighter( FieldValueHighlighters.highlighted( 80 * 3, 1, new PassageFormatter("...", ">", "<"), FLD_TEXT1::equals)) .appendFieldHighlighter(FieldValueHighlighters.skipRemaining()); { // [moon shine, firewater] are synonyms, tokens are lowercased. Porter stemming on. 
Query query = new IntervalQuery( FLD_TEXT1, Intervals.analyzedText("Firewater Fall", analyzer, FLD_TEXT1, 0, true)); assertHighlights( toDocList(highlighter.highlight(searcher.search(query, 10, sortOrder), query)), "0. text1: Where the >moon shine falls<, firewater flows."); } }); } @Test public void testStandardQueryParserIntervalFunctions() throws Exception { Analyzer analyzer = new Analyzer() { @Override protected TokenStreamComponents createComponents(String fieldName) { Tokenizer tokenizer = new StandardTokenizer(); TokenStream ts = tokenizer; ts = new LowerCaseFilter(ts); return new TokenStreamComponents(tokenizer, ts); } }; // Rerun the same test on fields with offsets and without offsets. for (String field : List.of(FLD_TEXT1, FLD_TEXT2)) { String inputDocument = "The quick brown fox jumps over the lazy dog"; String[][] queryResultPairs = new String[][] { {"fn:ordered(brown dog)", "0. %s: The quick >brown fox jumps over the lazy dog<"}, { "fn:within(fn:or(lazy quick) 1 fn:or(dog fox))", "0. %s: The quick brown fox jumps over the >lazy< dog" }, { "fn:containedBy(fox fn:ordered(brown fox dog))", "0. %s: The quick brown >fox< jumps over the lazy dog" }, { "fn:atLeast(2 quick fox \"furry dog\")", "0. %s: The >quick brown fox< jumps over the lazy dog" }, { "fn:maxgaps(0 fn:ordered(fn:or(quick lazy) fn:or(fox dog)))", "0. %s: The quick brown fox jumps over the >lazy dog<" }, { "fn:maxgaps(1 fn:ordered(fn:or(quick lazy) fn:or(fox dog)))", "0. %s: The >quick brown fox< jumps over the >lazy dog<" }, { "fn:maxwidth(2 fn:ordered(fn:or(quick lazy) fn:or(fox dog)))", "0. %s: The quick brown fox jumps over the >lazy dog<" }, { "fn:maxwidth(3 fn:ordered(fn:or(quick lazy) fn:or(fox dog)))", "0. %s: The >quick brown fox< jumps over the >lazy dog<" }, {"fn:or(quick \"fox\")", "0. %s: The >quick< brown >fox< jumps over the lazy dog"}, {"fn:or(\"quick fox\")"}, {"fn:phrase(quick brown fox)", "0. %s: The >quick brown fox< jumps over the lazy dog"}, {"fn:wildcard(jump*)", "0. 
%s: The quick brown fox >jumps< over the lazy dog"}, {"fn:wildcard(br*n)", "0. %s: The quick >brown< fox jumps over the lazy dog"}, {"fn:fuzzyTerm(fxo)", "0. %s: The quick brown >fox< jumps over the lazy dog"}, {"fn:or(dog fox)", "0. %s: The quick brown >fox< jumps over the lazy >dog<"}, { "fn:phrase(fn:ordered(quick fox) jumps)", "0. %s: The >quick brown fox jumps< over the lazy dog" }, {"fn:ordered(quick jumps dog)", "0. %s: The >quick brown fox jumps over the lazy dog<"}, { "fn:ordered(quick fn:or(fox dog))", "0. %s: The >quick brown fox< jumps over the lazy dog" }, { "fn:ordered(quick jumps fn:or(fox dog))", "0. %s: The >quick brown fox jumps over the lazy dog<" }, { "fn:unordered(dog jumps quick)", "0. %s: The >quick brown fox jumps over the lazy dog<" }, { "fn:unordered(fn:or(fox dog) quick)", "0. %s: The >quick brown fox< jumps over the lazy dog" }, { "fn:unordered(fn:phrase(brown fox) fn:phrase(fox jumps))", "0. %s: The quick >brown fox jumps< over the lazy dog" }, {"fn:ordered(fn:phrase(brown fox) fn:phrase(fox jumps))"}, {"fn:unorderedNoOverlaps(fn:phrase(brown fox) fn:phrase(fox jumps))"}, { "fn:before(fn:or(brown lazy) fox)", "0. %s: The quick >brown< fox jumps over the lazy dog" }, { "fn:before(fn:or(brown lazy) fn:or(dog fox))", "0. %s: The quick >brown< fox jumps over the >lazy< dog" }, { "fn:after(fn:or(brown lazy) fox)", "0. %s: The quick brown fox jumps over the >lazy< dog" }, { "fn:after(fn:or(brown lazy) fn:or(dog fox))", "0. %s: The quick brown fox jumps over the >lazy< dog" }, { "fn:within(fn:or(fox dog) 1 fn:or(quick lazy))", "0. %s: The quick brown fox jumps over the lazy >dog<" }, { "fn:within(fn:or(fox dog) 2 fn:or(quick lazy))", "0. %s: The quick brown >fox< jumps over the lazy >dog<" }, { "fn:notWithin(fn:or(fox dog) 1 fn:or(quick lazy))", "0. %s: The quick brown >fox< jumps over the lazy dog" }, { "fn:containedBy(fn:or(fox dog) fn:ordered(quick lazy))", "0. 
%s: The quick brown >fox< jumps over the lazy dog" }, { "fn:notContainedBy(fn:or(fox dog) fn:ordered(quick lazy))", "0. %s: The quick brown fox jumps over the lazy >dog<" }, { "fn:containing(fn:atLeast(2 quick fox dog) jumps)", "0. %s: The quick brown >fox jumps over the lazy dog<" }, { "fn:notContaining(fn:ordered(fn:or(the The) fn:or(fox dog)) brown)", "0. %s: The quick brown fox jumps over >the lazy dog<" }, { "fn:overlapping(fn:phrase(brown fox) fn:phrase(fox jumps))", "0. %s: The quick >brown fox< jumps over the lazy dog" }, { "fn:overlapping(fn:or(fox dog) fn:extend(lazy 2 2))", "0. %s: The quick brown fox jumps over the lazy >dog<" }, { "fn:nonOverlapping(fn:phrase(brown fox) fn:phrase(lazy dog))", "0. %s: The quick >brown fox< jumps over the lazy dog" }, { "fn:nonOverlapping(fn:or(fox dog) fn:extend(lazy 2 2))", "0. %s: The quick brown >fox< jumps over the lazy dog" }, { "fn:atLeast(2 fn:unordered(furry dog) fn:unordered(brown dog) lazy quick)", "0. %s: The >quick >brown fox jumps over the lazy<<> dog<" }, {"fn:extend(fox 1 2)", "0. %s: The quick >brown fox jumps over< the lazy dog"}, { "fn:extend(fn:or(dog fox) 2 0)", "0. %s: The >quick brown fox< jumps over >the lazy dog<" }, { "fn:containedBy(fn:or(fox dog) fn:extend(lazy 3 3))", "0. %s: The quick brown fox jumps over the lazy >dog<" }, { "fn:notContainedBy(fn:or(fox dog) fn:extend(lazy 3 3))", "0. %s: The quick brown >fox< jumps over the lazy dog" }, { "fn:containing(fn:extend(fn:or(lazy brown) 1 1) fn:or(fox dog))", "0. %s: The >quick brown fox< jumps over >the lazy dog<" }, { "fn:notContaining(fn:extend(fn:or(fox dog) 1 0) fn:or(brown yellow))", "0. %s: The quick brown fox jumps over the >lazy dog<" } }; // Verify assertions. new IndexBuilder(this::toField) // Just one document and multiple interval queries to check. .doc(field, inputDocument) .build( analyzer, reader -> { IndexSearcher searcher = new IndexSearcher(reader); Sort sortOrder = Sort.INDEXORDER; // So that results are consistently ordered. 
MatchHighlighter highlighter = new MatchHighlighter(searcher, analyzer) .appendFieldHighlighter( FieldValueHighlighters.highlighted( 80 * 3, 1, new PassageFormatter("...", ">", "<"), _ -> true)) .appendFieldHighlighter(FieldValueHighlighters.skipRemaining()); StandardQueryParser qp = new StandardQueryParser(analyzer); // Run all pairs of query-expected highlight. List<String> errors = new ArrayList<>(); for (var queryHighlightPair : queryResultPairs) { assert queryHighlightPair.length >= 1; String queryString = queryHighlightPair[0]; var query = qp.parse(queryString, field); var expected = Arrays.stream(queryHighlightPair) .skip(1) .map(v -> String.format(Locale.ROOT, v, field)) .toArray(String[]::new); try { assertHighlights( toDocList( highlighter.highlight(searcher.search(query, 10, sortOrder), query)), expected); } catch (AssertionError e) { errors.add("MISMATCH: query: " + queryString + "\n" + e.getMessage()); } } if (errors.size() > 0) { throw new AssertionError(String.join("\n\n", errors)); } }); } } @Test public void testCustomFieldHighlightHandling() throws Exception { // Match highlighter is a showcase of individual components in this package, suitable // to create any kind of field-display designs. // // In this example we will build a custom field highlighting handler that // highlights matches over a multivalued field, shows that field's values if it received // no matches and limits the number of values displayed to at most 2 (with an appropriate // message). new IndexBuilder(this::toField) // Just one document, one field, four values. .doc(FLD_TEXT1, "foo bar", "bar foo baz", "bar baz foo", "baz baz baz") .build( analyzer, reader -> { IndexSearcher searcher = new IndexSearcher(reader); Sort sortOrder = Sort.INDEXORDER; // Let's start with the simple predefined highlighter so that the field's value shows // and is highlighted when it was part of the hit. 
MatchHighlighter.FieldValueHighlighter highlighted = FieldValueHighlighters.highlighted( 80 * 3, 2, new PassageFormatter("...", ">", "<"), FLD_TEXT1::equals); MatchHighlighter highlighter = new MatchHighlighter(searcher, analyzer) .appendFieldHighlighter(highlighted) .appendFieldHighlighter(FieldValueHighlighters.skipRemaining()); Query query = new TermQuery(new Term(FLD_TEXT1, "foo")); TopDocs topDocs = searcher.search(query, 10, sortOrder); // Note the highlighter is configured with at most 2 snippets so the match on the // third value ("bar baz foo") is omitted. Ellipsis isn't inserted too because // values are displayed in full. assertHighlights( toDocList(highlighter.highlight(topDocs, query)), "0. text1: >foo< bar, bar >foo< baz"); // So the above works fine if the field received a match but omits it otherwise. We // can // force the display of this field by chaining with verbatim value highlighter: highlighter = new MatchHighlighter(searcher, analyzer) .appendFieldHighlighter( highlighted.or(FieldValueHighlighters.verbatimValue(FLD_TEXT1))) .appendFieldHighlighter(FieldValueHighlighters.skipRemaining()); assertHighlights( toDocList(highlighter.highlight(topDocs, new MatchAllDocsQuery())), "0. text1: foo bar, bar foo baz, bar baz foo, baz baz baz"); // But this is not exactly what we'd like because we want to limit the display of // values to the first two. // Let's just write a custom field highlighter handler that does it. 
class AtMostNValuesHighlighter implements MatchHighlighter.FieldValueHighlighter { private final String field; private final int limit; AtMostNValuesHighlighter(String field, int limit) { this.field = field; this.limit = limit; } @Override public boolean isApplicable(String field, boolean hasMatches) { return Objects.equals(field, this.field); } @Override public List<String> format( String field, List<String> values, String contiguousValue, List<OffsetRange> valueRanges, List<MatchHighlighter.QueryOffsetRange> matchOffsets) { if (values.size() <= limit) { return values; } else { List<String> collected = values.subList(0, limit); int remaining = values.size() - collected.size(); collected.add(String.format(Locale.ROOT, "[%d omitted]", remaining)); return collected; } } @Override public Collection<String> alwaysFetchedFields() { return Collections.singleton(field); } } // We can now chain it as usual and contemplate the result. highlighter = new MatchHighlighter(searcher, analyzer) .appendFieldHighlighter( highlighted.or(new AtMostNValuesHighlighter(FLD_TEXT1, 2))) .appendFieldHighlighter(FieldValueHighlighters.skipRemaining()); assertHighlights( toDocList(highlighter.highlight(topDocs, query)), "0. text1: >foo< bar, bar >foo< baz"); assertHighlights( toDocList(highlighter.highlight(topDocs, new MatchAllDocsQuery())), "0. text1: foo bar, bar foo baz, [2 omitted]"); }); } @Test public void testHighlightMoreQueriesAtOnceShowoff() throws Exception { // Match highlighter underlying components are powerful enough to build interesting, // if not always super-practical, things. In this case, we would like to highlight // a set of matches of *more than one* query over the same set of input documents. This includes // highest-scoring passage resolution (from multiple hits) and different highlight markers // for each query. 
new IndexBuilder(this::toField) .doc(FLD_TEXT1, "foo bar baz") .doc(FLD_TEXT1, "foo baz bar") .build( analyzer, reader -> { // Let's start with the two queries. The first one will be an unordered // query for (foo, baz) with a max gap of 1; let's use intervals for this. Query q1 = new IntervalQuery( FLD_TEXT1, Intervals.maxgaps( 1, Intervals.unordered(Intervals.term("foo"), Intervals.term("baz")))); // The second one will be a simpler term query for "bar". Query q2 = new TermQuery(new Term(FLD_TEXT1, "bar")); // Let's fetch matching documents by combining the two into a Boolean query. Query query = new BooleanQuery.Builder() .add(q1, BooleanClause.Occur.SHOULD) .add(q2, BooleanClause.Occur.SHOULD) .build(); IndexSearcher searcher = new IndexSearcher(reader); Sort sortOrder = Sort.INDEXORDER; // So that results are consistently ordered. TopDocs topDocs = searcher.search(query, 10, sortOrder); // If we use the "regular" highlighter, the result will be slightly odd: a nested // highlight over "bar" within the first match. Also, you can't distinguish which of // the sub-queries // caused which highlight marker... but if it were HTML then you could give the span // some semi-translucent background and layered matches would be visible. MatchHighlighter highlighter = new MatchHighlighter(searcher, analyzer) .appendFieldHighlighter( FieldValueHighlighters.highlighted( 80 * 3, 1, new PassageFormatter("...", "<span>", "</span>"), FLD_TEXT1::equals)) .appendFieldHighlighter(FieldValueHighlighters.skipRemaining()); assertHighlights( toDocList(highlighter.highlight(topDocs, query)), "0. text1: <span>foo <span>bar</span> baz</span>", "1. text1: <span>foo baz</span> <span>bar</span>"); // To separate highlights for multiple queries we'll pass them separately to the // highlighter and differentiate highlight markers upon their application. Let's start // with the customized // field highlighter first. 
This utilizes the fact that match ranges passed from // MatchHighlighter // contain a reference to the original query which brought up the match. class SeparateMarkerFieldHighlighter implements MatchHighlighter.FieldValueHighlighter { private final String field; private final Map<Query, String> queryClassMap; SeparateMarkerFieldHighlighter(String field, Map<Query, String> queryClassMap) { this.field = field; this.queryClassMap = queryClassMap; } @Override public boolean isApplicable(String field, boolean hasMatches) { return Objects.equals(field, this.field) && hasMatches; } @Override public List<String> format( String field, List<String> values, String contiguousValue, List<OffsetRange> valueRanges, List<MatchHighlighter.QueryOffsetRange> matchOffsets) { PassageSelector passageSelector = new PassageSelector(); int maxPassageWindow = 80; int maxPassages = 3; List<Passage> bestPassages = passageSelector.pickBest( contiguousValue, matchOffsets, maxPassageWindow, maxPassages, valueRanges); // We know the offset ranges passed to us by MatchHighlighter are instances of // QueryOffsetRange // so we compute the class based on that. Function<OffsetRange, String> queryToClass = (range) -> queryClassMap.get(((MatchHighlighter.QueryOffsetRange) range).query); PassageFormatter passageFormatter = new PassageFormatter( "...", (range) -> "<span class='" + queryToClass.apply(range) + "'>", (_) -> "</span>"); return passageFormatter.format(contiguousValue, bestPassages, valueRanges); } } // And this is pretty much it. We now set up query classes to display, set up the // highlighter... Map<Query, String> queryClassMap = Map.of(q1, "q1", q2, "q2"); highlighter = new MatchHighlighter(searcher, analyzer) .appendFieldHighlighter( new SeparateMarkerFieldHighlighter(FLD_TEXT1, queryClassMap)) .appendFieldHighlighter(FieldValueHighlighters.skipRemaining()); // ...and run highlighting. 
Note the query passed to the highlighter are individual // sub-clauses // of the Boolean query used to fetch documents. assertHighlights( toDocList(highlighter.highlight(topDocs, q1, q2)), "0. text1: <span class='q1'>foo <span class='q2'>bar</span> baz</span>", "1. text1: <span class='q1'>foo baz</span> <span class='q2'>bar</span>"); }); } private void assertHighlights(List<List<String>> docList, String... expectedFormattedLines) { ArrayList<String> actualLines = new ArrayList<>(); for (int doc = 0; doc < docList.size(); doc++) { List<String> fields = docList.get(doc); for (int i = 0; i < fields.size(); i++) { actualLines.add( (i == 0 ? String.format(Locale.ROOT, "%2d. ", doc) : " ") + fields.get(i)); } } var expectedTrimmed = Stream.of(expectedFormattedLines).map(String::trim).toList(); var actualTrimmed = actualLines.stream().map(String::trim).toList(); if (!Objects.equals(expectedTrimmed, actualTrimmed)) { throw new AssertionError( "Actual hits were:\n" + String.join("\n", actualTrimmed) + "\n\nbut expected them to be:\n" + String.join("\n", expectedTrimmed)); } } private List<List<String>> toDocList(Stream<MatchHighlighter.DocHighlights> highlights) { return highlights .map( docHighlights -> docHighlights.fields.entrySet().stream() .map(e -> e.getKey() + ": " + String.join(", ", e.getValue())) .toList()) .toList(); } private IndexableField toField(String name, String value) { switch (name) { case FLD_TEXT1: return new Field(name, value, TYPE_TEXT_POSITIONS_OFFSETS); case FLD_TEXT2: return new Field(name, value, TYPE_TEXT_POSITIONS); default: throw new AssertionError("Don't know how to handle this field: " + name); } } }
googleapis/google-cloud-java
37,134
java-datacatalog/proto-google-cloud-datacatalog-v1/src/main/java/com/google/cloud/datacatalog/v1/DataplexExternalTable.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/datacatalog/v1/dataplex_spec.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.datacatalog.v1; /** * * * <pre> * External table registered by Dataplex. * Dataplex publishes data discovered from an asset into multiple other systems * (BigQuery, DPMS) in form of tables. We call them "external tables". External * tables are also synced into the Data Catalog. * This message contains pointers to * those external tables (fully qualified name, resource name et cetera) within * the Data Catalog. * </pre> * * Protobuf type {@code google.cloud.datacatalog.v1.DataplexExternalTable} */ public final class DataplexExternalTable extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.datacatalog.v1.DataplexExternalTable) DataplexExternalTableOrBuilder { private static final long serialVersionUID = 0L; // Use DataplexExternalTable.newBuilder() to construct. 
private DataplexExternalTable(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private DataplexExternalTable() { system_ = 0; fullyQualifiedName_ = ""; googleCloudResource_ = ""; dataCatalogEntry_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new DataplexExternalTable(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.datacatalog.v1.DataplexSpecProto .internal_static_google_cloud_datacatalog_v1_DataplexExternalTable_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.datacatalog.v1.DataplexSpecProto .internal_static_google_cloud_datacatalog_v1_DataplexExternalTable_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.datacatalog.v1.DataplexExternalTable.class, com.google.cloud.datacatalog.v1.DataplexExternalTable.Builder.class); } public static final int SYSTEM_FIELD_NUMBER = 1; private int system_ = 0; /** * * * <pre> * Service in which the external table is registered. * </pre> * * <code>.google.cloud.datacatalog.v1.IntegratedSystem system = 1;</code> * * @return The enum numeric value on the wire for system. */ @java.lang.Override public int getSystemValue() { return system_; } /** * * * <pre> * Service in which the external table is registered. * </pre> * * <code>.google.cloud.datacatalog.v1.IntegratedSystem system = 1;</code> * * @return The system. */ @java.lang.Override public com.google.cloud.datacatalog.v1.IntegratedSystem getSystem() { com.google.cloud.datacatalog.v1.IntegratedSystem result = com.google.cloud.datacatalog.v1.IntegratedSystem.forNumber(system_); return result == null ? 
com.google.cloud.datacatalog.v1.IntegratedSystem.UNRECOGNIZED : result; } public static final int FULLY_QUALIFIED_NAME_FIELD_NUMBER = 28; @SuppressWarnings("serial") private volatile java.lang.Object fullyQualifiedName_ = ""; /** * * * <pre> * Fully qualified name (FQN) of the external table. * </pre> * * <code>string fully_qualified_name = 28;</code> * * @return The fullyQualifiedName. */ @java.lang.Override public java.lang.String getFullyQualifiedName() { java.lang.Object ref = fullyQualifiedName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); fullyQualifiedName_ = s; return s; } } /** * * * <pre> * Fully qualified name (FQN) of the external table. * </pre> * * <code>string fully_qualified_name = 28;</code> * * @return The bytes for fullyQualifiedName. */ @java.lang.Override public com.google.protobuf.ByteString getFullyQualifiedNameBytes() { java.lang.Object ref = fullyQualifiedName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); fullyQualifiedName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int GOOGLE_CLOUD_RESOURCE_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object googleCloudResource_ = ""; /** * * * <pre> * Google Cloud resource name of the external table. * </pre> * * <code>string google_cloud_resource = 3;</code> * * @return The googleCloudResource. 
*/ @java.lang.Override public java.lang.String getGoogleCloudResource() { java.lang.Object ref = googleCloudResource_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); googleCloudResource_ = s; return s; } } /** * * * <pre> * Google Cloud resource name of the external table. * </pre> * * <code>string google_cloud_resource = 3;</code> * * @return The bytes for googleCloudResource. */ @java.lang.Override public com.google.protobuf.ByteString getGoogleCloudResourceBytes() { java.lang.Object ref = googleCloudResource_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); googleCloudResource_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int DATA_CATALOG_ENTRY_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object dataCatalogEntry_ = ""; /** * * * <pre> * Name of the Data Catalog entry representing the external table. * </pre> * * <code>string data_catalog_entry = 4;</code> * * @return The dataCatalogEntry. */ @java.lang.Override public java.lang.String getDataCatalogEntry() { java.lang.Object ref = dataCatalogEntry_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); dataCatalogEntry_ = s; return s; } } /** * * * <pre> * Name of the Data Catalog entry representing the external table. * </pre> * * <code>string data_catalog_entry = 4;</code> * * @return The bytes for dataCatalogEntry. 
*/ @java.lang.Override public com.google.protobuf.ByteString getDataCatalogEntryBytes() { java.lang.Object ref = dataCatalogEntry_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); dataCatalogEntry_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (system_ != com.google.cloud.datacatalog.v1.IntegratedSystem.INTEGRATED_SYSTEM_UNSPECIFIED .getNumber()) { output.writeEnum(1, system_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(googleCloudResource_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, googleCloudResource_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(dataCatalogEntry_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, dataCatalogEntry_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(fullyQualifiedName_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 28, fullyQualifiedName_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (system_ != com.google.cloud.datacatalog.v1.IntegratedSystem.INTEGRATED_SYSTEM_UNSPECIFIED .getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, system_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(googleCloudResource_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, googleCloudResource_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(dataCatalogEntry_)) { size += 
com.google.protobuf.GeneratedMessageV3.computeStringSize(4, dataCatalogEntry_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(fullyQualifiedName_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(28, fullyQualifiedName_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.datacatalog.v1.DataplexExternalTable)) { return super.equals(obj); } com.google.cloud.datacatalog.v1.DataplexExternalTable other = (com.google.cloud.datacatalog.v1.DataplexExternalTable) obj; if (system_ != other.system_) return false; if (!getFullyQualifiedName().equals(other.getFullyQualifiedName())) return false; if (!getGoogleCloudResource().equals(other.getGoogleCloudResource())) return false; if (!getDataCatalogEntry().equals(other.getDataCatalogEntry())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + SYSTEM_FIELD_NUMBER; hash = (53 * hash) + system_; hash = (37 * hash) + FULLY_QUALIFIED_NAME_FIELD_NUMBER; hash = (53 * hash) + getFullyQualifiedName().hashCode(); hash = (37 * hash) + GOOGLE_CLOUD_RESOURCE_FIELD_NUMBER; hash = (53 * hash) + getGoogleCloudResource().hashCode(); hash = (37 * hash) + DATA_CATALOG_ENTRY_FIELD_NUMBER; hash = (53 * hash) + getDataCatalogEntry().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.datacatalog.v1.DataplexExternalTable parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datacatalog.v1.DataplexExternalTable parseFrom( 
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datacatalog.v1.DataplexExternalTable parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datacatalog.v1.DataplexExternalTable parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datacatalog.v1.DataplexExternalTable parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datacatalog.v1.DataplexExternalTable parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datacatalog.v1.DataplexExternalTable parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.datacatalog.v1.DataplexExternalTable parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.datacatalog.v1.DataplexExternalTable parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.datacatalog.v1.DataplexExternalTable parseDelimitedFrom( 
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.datacatalog.v1.DataplexExternalTable parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.datacatalog.v1.DataplexExternalTable parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.datacatalog.v1.DataplexExternalTable prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * External table registered by Dataplex. * Dataplex publishes data discovered from an asset into multiple other systems * (BigQuery, DPMS) in form of tables. We call them "external tables". External * tables are also synced into the Data Catalog. * This message contains pointers to * those external tables (fully qualified name, resource name et cetera) within * the Data Catalog. 
* </pre> * * Protobuf type {@code google.cloud.datacatalog.v1.DataplexExternalTable} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.datacatalog.v1.DataplexExternalTable) com.google.cloud.datacatalog.v1.DataplexExternalTableOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.datacatalog.v1.DataplexSpecProto .internal_static_google_cloud_datacatalog_v1_DataplexExternalTable_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.datacatalog.v1.DataplexSpecProto .internal_static_google_cloud_datacatalog_v1_DataplexExternalTable_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.datacatalog.v1.DataplexExternalTable.class, com.google.cloud.datacatalog.v1.DataplexExternalTable.Builder.class); } // Construct using com.google.cloud.datacatalog.v1.DataplexExternalTable.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; system_ = 0; fullyQualifiedName_ = ""; googleCloudResource_ = ""; dataCatalogEntry_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.datacatalog.v1.DataplexSpecProto .internal_static_google_cloud_datacatalog_v1_DataplexExternalTable_descriptor; } @java.lang.Override public com.google.cloud.datacatalog.v1.DataplexExternalTable getDefaultInstanceForType() { return com.google.cloud.datacatalog.v1.DataplexExternalTable.getDefaultInstance(); } @java.lang.Override public com.google.cloud.datacatalog.v1.DataplexExternalTable build() { com.google.cloud.datacatalog.v1.DataplexExternalTable result = buildPartial(); if 
(!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.datacatalog.v1.DataplexExternalTable buildPartial() { com.google.cloud.datacatalog.v1.DataplexExternalTable result = new com.google.cloud.datacatalog.v1.DataplexExternalTable(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.datacatalog.v1.DataplexExternalTable result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.system_ = system_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.fullyQualifiedName_ = fullyQualifiedName_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.googleCloudResource_ = googleCloudResource_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.dataCatalogEntry_ = dataCatalogEntry_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.datacatalog.v1.DataplexExternalTable) { return 
mergeFrom((com.google.cloud.datacatalog.v1.DataplexExternalTable) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.datacatalog.v1.DataplexExternalTable other) { if (other == com.google.cloud.datacatalog.v1.DataplexExternalTable.getDefaultInstance()) return this; if (other.system_ != 0) { setSystemValue(other.getSystemValue()); } if (!other.getFullyQualifiedName().isEmpty()) { fullyQualifiedName_ = other.fullyQualifiedName_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getGoogleCloudResource().isEmpty()) { googleCloudResource_ = other.googleCloudResource_; bitField0_ |= 0x00000004; onChanged(); } if (!other.getDataCatalogEntry().isEmpty()) { dataCatalogEntry_ = other.dataCatalogEntry_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { system_ = input.readEnum(); bitField0_ |= 0x00000001; break; } // case 8 case 26: { googleCloudResource_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 case 34: { dataCatalogEntry_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 case 226: { fullyQualifiedName_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 226 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } 
finally { onChanged(); } // finally return this; } private int bitField0_; private int system_ = 0; /** * * * <pre> * Service in which the external table is registered. * </pre> * * <code>.google.cloud.datacatalog.v1.IntegratedSystem system = 1;</code> * * @return The enum numeric value on the wire for system. */ @java.lang.Override public int getSystemValue() { return system_; } /** * * * <pre> * Service in which the external table is registered. * </pre> * * <code>.google.cloud.datacatalog.v1.IntegratedSystem system = 1;</code> * * @param value The enum numeric value on the wire for system to set. * @return This builder for chaining. */ public Builder setSystemValue(int value) { system_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Service in which the external table is registered. * </pre> * * <code>.google.cloud.datacatalog.v1.IntegratedSystem system = 1;</code> * * @return The system. */ @java.lang.Override public com.google.cloud.datacatalog.v1.IntegratedSystem getSystem() { com.google.cloud.datacatalog.v1.IntegratedSystem result = com.google.cloud.datacatalog.v1.IntegratedSystem.forNumber(system_); return result == null ? com.google.cloud.datacatalog.v1.IntegratedSystem.UNRECOGNIZED : result; } /** * * * <pre> * Service in which the external table is registered. * </pre> * * <code>.google.cloud.datacatalog.v1.IntegratedSystem system = 1;</code> * * @param value The system to set. * @return This builder for chaining. */ public Builder setSystem(com.google.cloud.datacatalog.v1.IntegratedSystem value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; system_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * Service in which the external table is registered. * </pre> * * <code>.google.cloud.datacatalog.v1.IntegratedSystem system = 1;</code> * * @return This builder for chaining. 
     */
    // Clears `system` to its default (0) and drops its presence bit (0x00000001).
    public Builder clearSystem() {
      bitField0_ = (bitField0_ & ~0x00000001);
      system_ = 0;
      onChanged();
      return this;
    }

    // Backing store for `fully_qualified_name`: holds either a String or a
    // ByteString; converted lazily in the accessors below and cached.
    private java.lang.Object fullyQualifiedName_ = "";

    /**
     * <pre>
     * Fully qualified name (FQN) of the external table.
     * </pre>
     *
     * <code>string fully_qualified_name = 28;</code>
     *
     * @return The fullyQualifiedName.
     */
    public java.lang.String getFullyQualifiedName() {
      java.lang.Object ref = fullyQualifiedName_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String so subsequent reads skip UTF-8 decoding.
        fullyQualifiedName_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * <pre>
     * Fully qualified name (FQN) of the external table.
     * </pre>
     *
     * <code>string fully_qualified_name = 28;</code>
     *
     * @return The bytes for fullyQualifiedName.
     */
    public com.google.protobuf.ByteString getFullyQualifiedNameBytes() {
      java.lang.Object ref = fullyQualifiedName_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        // Cache the encoded ByteString so subsequent reads skip re-encoding.
        fullyQualifiedName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * <pre>
     * Fully qualified name (FQN) of the external table.
     * </pre>
     *
     * <code>string fully_qualified_name = 28;</code>
     *
     * @param value The fullyQualifiedName to set. Must not be null.
     * @return This builder for chaining.
     */
    public Builder setFullyQualifiedName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      fullyQualifiedName_ = value;
      // Bit 0x00000002 records that fully_qualified_name was explicitly set.
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     * <pre>
     * Fully qualified name (FQN) of the external table.
     * </pre>
     *
     * <code>string fully_qualified_name = 28;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearFullyQualifiedName() {
      fullyQualifiedName_ = getDefaultInstance().getFullyQualifiedName();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     * <pre>
     * Fully qualified name (FQN) of the external table.
     * </pre>
     *
     * <code>string fully_qualified_name = 28;</code>
     *
     * @param value The bytes for fullyQualifiedName to set. Must be valid UTF-8.
     * @return This builder for chaining.
     */
    public Builder setFullyQualifiedNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      fullyQualifiedName_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    // Backing store for `google_cloud_resource`; same lazy String/ByteString
    // caching scheme as fullyQualifiedName_.
    private java.lang.Object googleCloudResource_ = "";

    /**
     * <pre>
     * Google Cloud resource name of the external table.
     * </pre>
     *
     * <code>string google_cloud_resource = 3;</code>
     *
     * @return The googleCloudResource.
     */
    public java.lang.String getGoogleCloudResource() {
      java.lang.Object ref = googleCloudResource_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        googleCloudResource_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * <pre>
     * Google Cloud resource name of the external table.
     * </pre>
     *
     * <code>string google_cloud_resource = 3;</code>
     *
     * @return The bytes for googleCloudResource.
     */
    public com.google.protobuf.ByteString getGoogleCloudResourceBytes() {
      java.lang.Object ref = googleCloudResource_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        googleCloudResource_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * <pre>
     * Google Cloud resource name of the external table.
     * </pre>
     *
     * <code>string google_cloud_resource = 3;</code>
     *
     * @param value The googleCloudResource to set. Must not be null.
     * @return This builder for chaining.
     */
    public Builder setGoogleCloudResource(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      googleCloudResource_ = value;
      // Bit 0x00000004 records that google_cloud_resource was explicitly set.
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     * <pre>
     * Google Cloud resource name of the external table.
     * </pre>
     *
     * <code>string google_cloud_resource = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearGoogleCloudResource() {
      googleCloudResource_ = getDefaultInstance().getGoogleCloudResource();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }

    /**
     * <pre>
     * Google Cloud resource name of the external table.
     * </pre>
     *
     * <code>string google_cloud_resource = 3;</code>
     *
     * @param value The bytes for googleCloudResource to set. Must be valid UTF-8.
     * @return This builder for chaining.
     */
    public Builder setGoogleCloudResourceBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      googleCloudResource_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    // Backing store for `data_catalog_entry`; same lazy String/ByteString
    // caching scheme as the fields above.
    private java.lang.Object dataCatalogEntry_ = "";

    /**
     * <pre>
     * Name of the Data Catalog entry representing the external table.
     * </pre>
     *
     * <code>string data_catalog_entry = 4;</code>
     *
     * @return The dataCatalogEntry.
     */
    public java.lang.String getDataCatalogEntry() {
      java.lang.Object ref = dataCatalogEntry_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        dataCatalogEntry_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * <pre>
     * Name of the Data Catalog entry representing the external table.
     * </pre>
     *
     * <code>string data_catalog_entry = 4;</code>
     *
     * @return The bytes for dataCatalogEntry.
     */
    public com.google.protobuf.ByteString getDataCatalogEntryBytes() {
      java.lang.Object ref = dataCatalogEntry_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        dataCatalogEntry_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * <pre>
     * Name of the Data Catalog entry representing the external table.
     * </pre>
     *
     * <code>string data_catalog_entry = 4;</code>
     *
     * @param value The dataCatalogEntry to set. Must not be null.
     * @return This builder for chaining.
     */
    public Builder setDataCatalogEntry(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      dataCatalogEntry_ = value;
      // Bit 0x00000008 records that data_catalog_entry was explicitly set.
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }

    /**
     * <pre>
     * Name of the Data Catalog entry representing the external table.
     * </pre>
     *
     * <code>string data_catalog_entry = 4;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearDataCatalogEntry() {
      dataCatalogEntry_ = getDefaultInstance().getDataCatalogEntry();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }

    /**
     * <pre>
     * Name of the Data Catalog entry representing the external table.
     * </pre>
     *
     * <code>string data_catalog_entry = 4;</code>
     *
     * @param value The bytes for dataCatalogEntry to set. Must be valid UTF-8.
     * @return This builder for chaining.
     */
    public Builder setDataCatalogEntryBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      dataCatalogEntry_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.datacatalog.v1.DataplexExternalTable)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1.DataplexExternalTable)
  // Singleton default instance; all cleared fields read their values from it.
  private static final com.google.cloud.datacatalog.v1.DataplexExternalTable DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.datacatalog.v1.DataplexExternalTable();
  }

  public static com.google.cloud.datacatalog.v1.DataplexExternalTable getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser delegating to Builder.mergeFrom; every parse failure is rethrown as
  // InvalidProtocolBufferException carrying the partially-built message.
  private static final com.google.protobuf.Parser<DataplexExternalTable> PARSER =
      new com.google.protobuf.AbstractParser<DataplexExternalTable>() {
        @java.lang.Override
        public DataplexExternalTable parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<DataplexExternalTable> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<DataplexExternalTable> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.datacatalog.v1.DataplexExternalTable getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}